Dec 05 05:52:03 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 05 05:52:03 crc restorecon[4741]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 05:52:03 crc restorecon[4741]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:52:03 crc 
restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 05:52:03 crc 
restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 
05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 05:52:03 crc 
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 05:52:03 crc restorecon[4741]: 
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:03 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 05:52:04 crc restorecon[4741]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 05:52:04 crc restorecon[4741]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
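The restorecon output above is about SELinux file labels, which live in the security.selinux extended attribute of each file; restorecon compares that attribute against the policy's expected context and skips files whose labels an admin has customized. A minimal sketch of how such a label can be read, assuming Python 3 on an SELinux-enabled Linux host (the path is illustrative; os.getxattr is Linux-only and the file must be readable by the caller):

import os

# SELinux stores the label in the "security.selinux" xattr as a
# NUL-terminated byte string, e.g. system_u:object_r:container_var_lib_t:s0.
PATH = "/var/lib/kubelet/config.json"  # illustrative path, not prescriptive

label = os.getxattr(PATH, "security.selinux")
print(label.decode().rstrip("\x00"))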
Dec 05 05:52:04 crc kubenswrapper[4742]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 05:52:04 crc kubenswrapper[4742]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 05 05:52:04 crc kubenswrapper[4742]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 05:52:04 crc kubenswrapper[4742]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 05:52:04 crc kubenswrapper[4742]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 05 05:52:04 crc kubenswrapper[4742]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.197031    4742 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
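Each deprecation warning above points at the kubelet config file named by --config. A minimal sketch of the corresponding KubeletConfiguration, assuming the kubelet.config.k8s.io/v1beta1 API and Kubernetes 1.27+ (where containerRuntimeEndpoint became a config-file field); the socket path, taint, and reservation values are illustrative placeholders, not values taken from this cluster's actual configuration:

apiVersion: kubelet.config.k8s.io/v1beta1
kind: KubeletConfiguration
# --container-runtime-endpoint -> containerRuntimeEndpoint
containerRuntimeEndpoint: unix:///var/run/crio/crio.sock
# --volume-plugin-dir -> volumePluginDir
volumePluginDir: /etc/kubernetes/kubelet-plugins/volume/exec
# --register-with-taints -> registerWithTaints
registerWithTaints:
- key: node-role.kubernetes.io/master
  effect: NoSchedule
# --system-reserved -> systemReserved
systemReserved:
  cpu: 500m
  memory: 1Gi
# --minimum-container-ttl-duration is superseded by eviction thresholds
evictionHard:
  memory.available: 100Mi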
Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199305 4742 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199309 4742 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199313 4742 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199317 4742 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199321 4742 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199324 4742 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199332 4742 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199335 4742 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199339 4742 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199342 4742 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199345 4742 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199349 4742 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199352 4742 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199356 4742 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199359 4742 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199363 4742 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199366 4742 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199369 4742 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199373 4742 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199376 4742 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199379 4742 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199383 4742 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199387 4742 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
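[Editor's note] The long runs of "unrecognized feature gate:" warnings emit one line per OpenShift-only gate, and the whole block is re-emitted each time the gate map is re-parsed (the same list appears again further down). A sketch, under the same assumed log path as above, that collapses the noise into one count per gate:

```python
import re
from collections import Counter

GATE_RE = re.compile(r"unrecognized feature gate: (\w+)")

def gate_counts(path: str) -> Counter:
    """Count how often each unrecognized feature gate is warned about."""
    counts = Counter()
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            counts.update(GATE_RE.findall(line))
    return counts

# Gates with a count above 1 were re-emitted by repeated parsing passes.
for gate, n in gate_counts("kubelet.log").most_common():
    print(f"{gate}: {n}")
```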
Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199392 4742 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199395 4742 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199399 4742 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199402 4742 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199406 4742 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199409 4742 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199413 4742 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199416 4742 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199420 4742 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199424 4742 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199428 4742 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199431 4742 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199435 4742 feature_gate.go:330] unrecognized feature gate: Example Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199438 4742 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199441 4742 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199445 4742 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199449 4742 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199453 4742 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199457 4742 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199461 4742 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199464 4742 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199468 4742 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199471 4742 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199475 4742 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199478 4742 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199481 4742 feature_gate.go:330] 
unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199485 4742 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199488 4742 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199492 4742 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199495 4742 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199498 4742 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199502 4742 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199505 4742 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199509 4742 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199512 4742 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199515 4742 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199520 4742 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.199524 4742 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199728 4742 flags.go:64] FLAG: --address="0.0.0.0" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199738 4742 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199744 4742 flags.go:64] FLAG: --anonymous-auth="true" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199750 4742 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199790 4742 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199794 4742 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199800 4742 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199805 4742 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199810 4742 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199814 4742 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199819 4742 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199823 4742 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199827 4742 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199831 4742 flags.go:64] FLAG: --cgroup-root="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199835 4742 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 05 05:52:04 crc 
kubenswrapper[4742]: I1205 05:52:04.199840 4742 flags.go:64] FLAG: --client-ca-file="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199843 4742 flags.go:64] FLAG: --cloud-config="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199848 4742 flags.go:64] FLAG: --cloud-provider="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199852 4742 flags.go:64] FLAG: --cluster-dns="[]" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199858 4742 flags.go:64] FLAG: --cluster-domain="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199862 4742 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199866 4742 flags.go:64] FLAG: --config-dir="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199870 4742 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199874 4742 flags.go:64] FLAG: --container-log-max-files="5" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199879 4742 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199883 4742 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199887 4742 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199892 4742 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199896 4742 flags.go:64] FLAG: --contention-profiling="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199900 4742 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199904 4742 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199908 4742 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199912 4742 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199917 4742 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199921 4742 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199926 4742 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199930 4742 flags.go:64] FLAG: --enable-load-reader="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199934 4742 flags.go:64] FLAG: --enable-server="true" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199938 4742 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199943 4742 flags.go:64] FLAG: --event-burst="100" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199947 4742 flags.go:64] FLAG: --event-qps="50" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199951 4742 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199955 4742 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199959 4742 flags.go:64] FLAG: --eviction-hard="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199964 4742 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 05 05:52:04 crc kubenswrapper[4742]: 
I1205 05:52:04.199968 4742 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199972 4742 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199976 4742 flags.go:64] FLAG: --eviction-soft="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199980 4742 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199984 4742 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199988 4742 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199992 4742 flags.go:64] FLAG: --experimental-mounter-path="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.199996 4742 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200001 4742 flags.go:64] FLAG: --fail-swap-on="true" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200005 4742 flags.go:64] FLAG: --feature-gates="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200009 4742 flags.go:64] FLAG: --file-check-frequency="20s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200013 4742 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200018 4742 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200022 4742 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200027 4742 flags.go:64] FLAG: --healthz-port="10248" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200031 4742 flags.go:64] FLAG: --help="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200035 4742 flags.go:64] FLAG: --hostname-override="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200039 4742 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200043 4742 flags.go:64] FLAG: --http-check-frequency="20s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200047 4742 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200064 4742 flags.go:64] FLAG: --image-credential-provider-config="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200068 4742 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200073 4742 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200078 4742 flags.go:64] FLAG: --image-service-endpoint="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200081 4742 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200085 4742 flags.go:64] FLAG: --kube-api-burst="100" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200089 4742 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200094 4742 flags.go:64] FLAG: --kube-api-qps="50" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200098 4742 flags.go:64] FLAG: --kube-reserved="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200102 4742 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200106 4742 
flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200110 4742 flags.go:64] FLAG: --kubelet-cgroups="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200114 4742 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200118 4742 flags.go:64] FLAG: --lock-file="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200122 4742 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200126 4742 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200131 4742 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200137 4742 flags.go:64] FLAG: --log-json-split-stream="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200141 4742 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200146 4742 flags.go:64] FLAG: --log-text-split-stream="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200150 4742 flags.go:64] FLAG: --logging-format="text" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200154 4742 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200158 4742 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200163 4742 flags.go:64] FLAG: --manifest-url="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200168 4742 flags.go:64] FLAG: --manifest-url-header="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200173 4742 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200177 4742 flags.go:64] FLAG: --max-open-files="1000000" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200183 4742 flags.go:64] FLAG: --max-pods="110" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200187 4742 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200192 4742 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200196 4742 flags.go:64] FLAG: --memory-manager-policy="None" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200200 4742 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200205 4742 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200209 4742 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200214 4742 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200223 4742 flags.go:64] FLAG: --node-status-max-images="50" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200227 4742 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200232 4742 flags.go:64] FLAG: --oom-score-adj="-999" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200236 4742 flags.go:64] FLAG: --pod-cidr="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200240 4742 flags.go:64] FLAG: 
--pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200246 4742 flags.go:64] FLAG: --pod-manifest-path="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200250 4742 flags.go:64] FLAG: --pod-max-pids="-1" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200255 4742 flags.go:64] FLAG: --pods-per-core="0" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200259 4742 flags.go:64] FLAG: --port="10250" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200264 4742 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200268 4742 flags.go:64] FLAG: --provider-id="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200272 4742 flags.go:64] FLAG: --qos-reserved="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200277 4742 flags.go:64] FLAG: --read-only-port="10255" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200281 4742 flags.go:64] FLAG: --register-node="true" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200286 4742 flags.go:64] FLAG: --register-schedulable="true" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200289 4742 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200296 4742 flags.go:64] FLAG: --registry-burst="10" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200300 4742 flags.go:64] FLAG: --registry-qps="5" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200304 4742 flags.go:64] FLAG: --reserved-cpus="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200308 4742 flags.go:64] FLAG: --reserved-memory="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200313 4742 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200317 4742 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200322 4742 flags.go:64] FLAG: --rotate-certificates="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200326 4742 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200330 4742 flags.go:64] FLAG: --runonce="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200335 4742 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200339 4742 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200343 4742 flags.go:64] FLAG: --seccomp-default="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200347 4742 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200352 4742 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200356 4742 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200360 4742 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200365 4742 flags.go:64] FLAG: --storage-driver-password="root" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200369 4742 flags.go:64] FLAG: --storage-driver-secure="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 
05:52:04.200374 4742 flags.go:64] FLAG: --storage-driver-table="stats" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200378 4742 flags.go:64] FLAG: --storage-driver-user="root" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200382 4742 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200386 4742 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200390 4742 flags.go:64] FLAG: --system-cgroups="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200394 4742 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200401 4742 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200405 4742 flags.go:64] FLAG: --tls-cert-file="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200409 4742 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200414 4742 flags.go:64] FLAG: --tls-min-version="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200418 4742 flags.go:64] FLAG: --tls-private-key-file="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200422 4742 flags.go:64] FLAG: --topology-manager-policy="none" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200426 4742 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200430 4742 flags.go:64] FLAG: --topology-manager-scope="container" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200435 4742 flags.go:64] FLAG: --v="2" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200443 4742 flags.go:64] FLAG: --version="false" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200449 4742 flags.go:64] FLAG: --vmodule="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200454 4742 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200458 4742 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200551 4742 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200556 4742 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200560 4742 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200564 4742 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200570 4742 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200573 4742 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200577 4742 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200580 4742 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200584 4742 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200588 4742 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 05:52:04 crc 
kubenswrapper[4742]: W1205 05:52:04.200592 4742 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200595 4742 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200599 4742 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200603 4742 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200606 4742 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200610 4742 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200613 4742 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200617 4742 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200621 4742 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200625 4742 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200629 4742 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200632 4742 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200636 4742 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200640 4742 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200644 4742 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
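[Editor's note] The flags.go:64 dump above records every kubelet flag's effective value in the form FLAG: --name="value". A sketch that parses that dump into a dict for inspection; values are kept as the raw strings the kubelet logged (including list-like values such as "[]"):

```python
import re

FLAG_RE = re.compile(r'FLAG: (--[\w-]+)="(.*?)"')

def effective_flags(path: str) -> dict[str, str]:
    """Map each logged kubelet flag to its effective (string) value."""
    flags = {}
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            for name, value in FLAG_RE.findall(line):
                flags[name] = value
    return flags

flags = effective_flags("kubelet.log")
print(flags.get("--node-ip"))                         # e.g. 192.168.126.11
print(flags.get("--minimum-container-ttl-duration"))  # e.g. 6m0s
```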
Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200649 4742 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200653 4742 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200657 4742 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200661 4742 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200666 4742 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200670 4742 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200674 4742 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200678 4742 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200682 4742 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200686 4742 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200690 4742 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200695 4742 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200698 4742 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200702 4742 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200707 4742 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200711 4742 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200715 4742 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200719 4742 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200723 4742 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200728 4742 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200732 4742 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200736 4742 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200740 4742 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200743 4742 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200747 4742 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200751 4742 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200754 4742 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200758 4742 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200761 4742 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200765 4742 feature_gate.go:330] unrecognized feature gate: Example Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200768 4742 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200772 4742 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200775 4742 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200779 4742 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200782 4742 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200787 4742 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200792 4742 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200797 4742 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200800 4742 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200804 4742 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200807 4742 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200810 4742 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200814 4742 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200819 4742 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200822 4742 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.200826 4742 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.200837 4742 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.209538 4742 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.209590 4742 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209730 4742 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209745 4742 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209755 4742 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209765 4742 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209773 4742 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209781 4742 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209789 4742 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209797 4742 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209805 4742 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209812 4742 
feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209820 4742 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209828 4742 feature_gate.go:330] unrecognized feature gate: Example Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209836 4742 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209845 4742 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209852 4742 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209863 4742 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209876 4742 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209885 4742 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209895 4742 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209903 4742 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209912 4742 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209920 4742 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209928 4742 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209936 4742 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209946 4742 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209955 4742 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209963 4742 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209971 4742 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209979 4742 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209989 4742 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.209999 4742 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210008 4742 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210016 4742 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210024 4742 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210031 4742 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210039 4742 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210047 4742 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210081 4742 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210090 4742 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210098 4742 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210105 4742 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210113 4742 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210120 4742 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210128 4742 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210136 4742 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210144 4742 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210152 4742 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210160 4742 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210168 4742 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210176 4742 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210183 4742 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210191 4742 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210198 4742 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210206 4742 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210214 4742 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210221 4742 feature_gate.go:330] unrecognized feature gate: 
AWSEFSDriverVolumeMetrics Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210229 4742 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210237 4742 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210245 4742 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210256 4742 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210267 4742 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210276 4742 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210284 4742 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210294 4742 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210301 4742 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210309 4742 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210317 4742 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210325 4742 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210333 4742 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210341 4742 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210351 4742 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.210365 4742 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210587 4742 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210600 4742 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210609 4742 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210618 4742 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210626 4742 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210634 4742 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210641 4742 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210649 4742 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210658 4742 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210667 4742 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210679 4742 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
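[Editor's note] Each parsing pass ends with a feature_gate.go:386 summary of the resolved gates in Go's map syntax, {map[Name:bool ...]}, as in the entry just above. A sketch turning one such summary line into a Python dict of booleans:

```python
import re

PAIR_RE = re.compile(r"(\w+):(true|false)")

def parse_gate_summary(line: str) -> dict[str, bool]:
    """Parse a 'feature gates: {map[K:v ...]}' summary into a dict."""
    start = line.find("feature gates: {map[")
    if start == -1:
        raise ValueError("not a feature-gate summary line")
    # Restrict matching to the map body so timestamps etc. are ignored.
    body = line[start:]
    return {k: v == "true" for k, v in PAIR_RE.findall(body)}

sample = "feature gates: {map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false]}"
print(parse_gate_summary(sample))
# {'CloudDualStackNodeIPs': True, 'KMSv1': True, 'NodeSwap': False}
```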
Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210689 4742 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210699 4742 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210707 4742 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210715 4742 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210724 4742 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210731 4742 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210739 4742 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210747 4742 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210755 4742 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210762 4742 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210770 4742 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210778 4742 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210787 4742 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210796 4742 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210804 4742 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210812 4742 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210820 4742 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210828 4742 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210836 4742 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210844 4742 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210854 4742 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210863 4742 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210871 4742 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210880 4742 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210888 4742 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210896 4742 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210906 4742 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210915 4742 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210923 4742 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210931 4742 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210938 4742 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210946 4742 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210953 4742 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210961 4742 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210969 4742 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210976 4742 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210984 4742 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210991 4742 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.210999 4742 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211007 4742 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211014 4742 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211022 4742 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211030 4742 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211037 4742 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211049 4742 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211079 4742 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211087 4742 feature_gate.go:330] 
unrecognized feature gate: MultiArchInstallAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211094 4742 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211102 4742 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211111 4742 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211118 4742 feature_gate.go:330] unrecognized feature gate: Example Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211127 4742 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211135 4742 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211142 4742 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211150 4742 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211157 4742 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211165 4742 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211172 4742 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211180 4742 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.211187 4742 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.211200 4742 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.211509 4742 server.go:940] "Client rotation is on, will bootstrap in background" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.218307 4742 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.218475 4742 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
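[Editor's note] The client-certificate rotation entries just below log the certificate expiry, the rotation deadline, and a Go-style wait duration ("236h42m7.685234802s"). A sketch converting such durations to seconds, e.g. to alert when rotation is imminent; the threshold is illustrative:

```python
import re

# Go durations as the kubelet logs them, e.g. "236h42m7.685234802s".
GO_DURATION_RE = re.compile(r"(?:(\d+)h)?(?:(\d+)m)?(?:([\d.]+)s)?$")

def go_duration_to_seconds(s: str) -> float:
    """Convert a Go-style h/m/s duration string to seconds."""
    m = GO_DURATION_RE.match(s)
    if not m or not any(m.groups()):
        raise ValueError(f"unparseable duration: {s!r}")
    h, mins, secs = m.groups()
    return int(h or 0) * 3600 + int(mins or 0) * 60 + float(secs or 0)

wait = go_duration_to_seconds("236h42m7.685234802s")
print(wait)                      # ~852127.69 seconds
print(wait < 24 * 3600)          # illustrative "rotation within a day?" check
```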
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.219270 4742 server.go:997] "Starting client certificate rotation"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.219327 4742 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.219879 4742 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-15 02:34:11.905233449 +0000 UTC
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.220003 4742 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 236h42m7.685234802s for next certificate rotation
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.226398 4742 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.228939 4742 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.238590 4742 log.go:25] "Validated CRI v1 runtime API"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.262216 4742 log.go:25] "Validated CRI v1 image API"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.264660 4742 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.268622 4742 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-05-05-47-38-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.268677 4742 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.294538 4742 manager.go:217] Machine: {Timestamp:2025-12-05 05:52:04.293042271 +0000 UTC m=+0.205177373 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:f65f0fdf-abb3-4467-8810-c82e92a7b58d BootID:665ecdae-ddb3-49af-8a22-677c4e53c8f0 Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:b8:cd:52 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:b8:cd:52 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:1b:25:13 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:d4:62:d5 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:3f:48:b4 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:ee:93:f6 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:7a:ac:1e Speed:-1 Mtu:1496} {Name:eth10 MacAddress:0a:3c:e5:30:78:80 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:9e:eb:22:3d:ef:e6 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.294826 4742 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.295019 4742 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.295939 4742 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.296211 4742 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.296255 4742 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.296838 4742 topology_manager.go:138] "Creating topology manager with none policy"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.296861 4742 container_manager_linux.go:303] "Creating device plugin manager"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.297024 4742 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.297149 4742 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.297367 4742 state_mem.go:36] "Initialized new in-memory state store"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.297753 4742 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.298550 4742 kubelet.go:418] "Attempting to sync node with API server"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.298579 4742 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.298598 4742 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.298614 4742 kubelet.go:324] "Adding apiserver pod source"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.298628 4742 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.306995 4742 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.307129 4742 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused
Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.307187 4742 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.307367 4742 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 05 05:52:04 crc kubenswrapper[4742]: E1205 05:52:04.307328 4742 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError"
Dec 05 05:52:04 crc kubenswrapper[4742]: E1205 05:52:04.307334 4742 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.308222 4742 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.308821 4742 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.308843 4742 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.308850 4742 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.308857 4742 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.308868 4742 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.308876 4742 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.308883 4742 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.308894 4742 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.308903 4742 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.308911 4742 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.308922 4742 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.308930 4742 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.309288 4742 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.309677 4742 server.go:1280] "Started kubelet"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.310321 4742 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.310480 4742 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.310484 4742 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.311318 4742 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 05 05:52:04 crc systemd[1]: Started Kubernetes Kubelet.
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.312353 4742 server.go:460] "Adding debug handlers to kubelet server"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.312628 4742 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.312684 4742 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.312809 4742 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 11:45:31.308278566 +0000 UTC
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.312898 4742 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 53h53m26.995387179s for next certificate rotation
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.313280 4742 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.313330 4742 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.313306 4742 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 05 05:52:04 crc kubenswrapper[4742]: E1205 05:52:04.313727 4742 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="200ms"
Dec 05 05:52:04 crc kubenswrapper[4742]: E1205 05:52:04.313497 4742 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.233:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e3bdcfd9e01e8 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 05:52:04.309647848 +0000 UTC m=+0.221782910,LastTimestamp:2025-12-05 05:52:04.309647848 +0000 UTC m=+0.221782910,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 05:52:04 crc kubenswrapper[4742]: E1205 05:52:04.313372 4742 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.314934 4742 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused
Dec 05 05:52:04 crc kubenswrapper[4742]: E1205 05:52:04.315107 4742 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.316386 4742 factory.go:153] Registering CRI-O factory
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.316413 4742 factory.go:221] Registration of the crio container factory successfully
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.316507 4742 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.316523 4742 factory.go:55] Registering systemd factory
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.316535 4742 factory.go:221] Registration of the systemd container factory successfully
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.316574 4742 factory.go:103] Registering Raw factory
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.316597 4742 manager.go:1196] Started watching for new ooms in manager
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.317636 4742 manager.go:319] Starting recovery of all containers
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339354 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339458 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339570 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339601 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339629 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339657 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339683 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339709 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339737 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339762 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339788 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339816 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339843 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339875 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339932 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.339964 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340015 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340041 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340105 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340136 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340194 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340222 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340252 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340281 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340309 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340359 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340397 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340426 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340453 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340479 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340508 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340535 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340575 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340599 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340623 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340648 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340674 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340700 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340727 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340753 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340781 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340808 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340888 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340919 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340946 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340972 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.340998 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341027 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341097 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341128 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341154 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341180 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341232 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341265 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341296 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341327 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341357 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341382 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341408 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341447 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341474 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341577 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341619 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341651 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341681 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341710 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341736 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341763 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341790 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341817 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341842 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341869 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341895 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341922 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341948 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.341973 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342017 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342045 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342111 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342139 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342168 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342194 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342222 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342283 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342310 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342338 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342364 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342390 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342414 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342438 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342466 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342492 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342519 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342545 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342572 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342614 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342645 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342671 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342698 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342726 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342754 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342793 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342821 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342850 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.342965 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343005 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343036 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343154 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343190 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343241 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343270 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343299 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343354 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343382 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343423 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343448 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343494 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343548 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343613 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343647 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343674 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343700 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343724 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343767 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343796 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343823 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343861 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.343890 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345135 4742 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345194 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345244 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345275 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345303 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345329 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345357 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345404 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345434 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345460 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345487 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345516 4742
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345542 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345568 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345608 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345635 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345665 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345692 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345719 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345746 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345830 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345863 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345889 4742 reconstruct.go:130] "Volume is marked 
as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345915 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345943 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345969 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.345994 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346045 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346126 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346150 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346179 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346210 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346240 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346265 4742 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346295 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346322 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346353 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346383 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346433 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346462 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346489 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346515 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346543 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346570 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346612 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346641 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346672 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346699 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346731 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346778 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346818 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346858 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346895 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346921 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346964 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.346990 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347018 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347093 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347124 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347152 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347179 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347212 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347251 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347282 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347311 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347339 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347366 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347423 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347450 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347477 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347503 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347939 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.347990 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.348027 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.348098 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.348129 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.348160 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.348192 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.348222 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.348247 4742 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.348274 4742 reconstruct.go:97] "Volume reconstruction finished" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.348292 4742 reconciler.go:26] "Reconciler: start to sync state" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.363706 4742 manager.go:324] Recovery completed Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.375932 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.378752 4742 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.378994 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.379115 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.379181 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.380628 4742 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.380705 4742 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.380767 4742 state_mem.go:36] "Initialized new in-memory state store" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.381231 4742 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.381318 4742 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.381447 4742 kubelet.go:2335] "Starting kubelet main sync loop" Dec 05 05:52:04 crc kubenswrapper[4742]: E1205 05:52:04.381533 4742 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.382008 4742 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 05:52:04 crc kubenswrapper[4742]: E1205 05:52:04.382104 4742 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.391190 4742 policy_none.go:49] "None policy: Start" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.392367 4742 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.392403 4742 state_mem.go:35] "Initializing new in-memory state store" Dec 05 05:52:04 crc kubenswrapper[4742]: E1205 05:52:04.414639 4742 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.445311 4742 manager.go:334] "Starting Device Plugin manager" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.445405 4742 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.445423 4742 server.go:79] "Starting device plugin registration server" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.445988 4742 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.446011 4742 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.446288 4742 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.446475 4742 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.446491 4742 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 05 05:52:04 crc kubenswrapper[4742]: E1205 05:52:04.453699 4742 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.482020 4742 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 05 05:52:04 crc kubenswrapper[4742]: 
I1205 05:52:04.482197 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.483593 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.483667 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.483684 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.483948 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.484224 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.484307 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.485259 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.485306 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.485325 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.485510 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.485591 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.485632 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.485648 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.485990 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.486100 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.486585 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.486616 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.486627 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.486744 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.487377 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.487813 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.487614 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.487581 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.488305 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.488338 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.488352 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.488359 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.488565 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.488686 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.488712 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.489647 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.489670 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.489679 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.489790 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.489807 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.489824 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.489842 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.489854 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.490175 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.490186 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.490194 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.490326 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.490345 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.490356 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:04 crc kubenswrapper[4742]: E1205 05:52:04.514642 4742 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="400ms" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.546229 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.547441 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.547532 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.547550 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.547617 4742 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 05:52:04 crc kubenswrapper[4742]: E1205 05:52:04.548285 4742 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.233:6443: connect: connection refused" node="crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550403 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550493 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550553 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550574 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550595 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550662 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550720 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550740 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550802 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550821 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550836 4742 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550884 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550914 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550970 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.550985 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.652779 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.652887 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.652949 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.652996 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653041 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653126 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653167 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653173 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653177 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653166 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653209 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653255 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653320 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653327 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653327 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653336 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653407 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653369 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653437 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653494 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653538 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653577 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653627 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653657 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653676 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653726 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653742 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653794 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653683 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.653914 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.749402 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.751460 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.751520 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.751538 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.751571 4742 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 05:52:04 crc kubenswrapper[4742]: E1205 05:52:04.752246 4742 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.233:6443: connect: connection refused" node="crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.816842 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.823993 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.842454 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.846983 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-e0621c079890548f59a91646eddc66bfe3587a97960a33b1370957d83c2b4866 WatchSource:0}: Error finding container e0621c079890548f59a91646eddc66bfe3587a97960a33b1370957d83c2b4866: Status 404 returned error can't find the container with id e0621c079890548f59a91646eddc66bfe3587a97960a33b1370957d83c2b4866 Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.850902 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-6db473e8eda63a743f16dbd8bfb4b0f1f00aab5d170563a158fad94ae92f35b2 WatchSource:0}: Error finding container 6db473e8eda63a743f16dbd8bfb4b0f1f00aab5d170563a158fad94ae92f35b2: Status 404 returned error can't find the container with id 6db473e8eda63a743f16dbd8bfb4b0f1f00aab5d170563a158fad94ae92f35b2 Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.861617 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.865386 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-f0938a1326929b8518101cb3cd4700da5292f83669d0c2de1c80217ad4987076 WatchSource:0}: Error finding container f0938a1326929b8518101cb3cd4700da5292f83669d0c2de1c80217ad4987076: Status 404 returned error can't find the container with id f0938a1326929b8518101cb3cd4700da5292f83669d0c2de1c80217ad4987076 Dec 05 05:52:04 crc kubenswrapper[4742]: I1205 05:52:04.871479 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.904802 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-a4fb54d6380af93c4e8a308540c2eef55adf849fbc934c24f7fde50f62b50b2a WatchSource:0}: Error finding container a4fb54d6380af93c4e8a308540c2eef55adf849fbc934c24f7fde50f62b50b2a: Status 404 returned error can't find the container with id a4fb54d6380af93c4e8a308540c2eef55adf849fbc934c24f7fde50f62b50b2a Dec 05 05:52:04 crc kubenswrapper[4742]: W1205 05:52:04.905888 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-d4272d05d8a0a63b555365c062efe0c9927f72b103acb69b5c6b3ab9465c24d2 WatchSource:0}: Error finding container d4272d05d8a0a63b555365c062efe0c9927f72b103acb69b5c6b3ab9465c24d2: Status 404 returned error can't find the container with id d4272d05d8a0a63b555365c062efe0c9927f72b103acb69b5c6b3ab9465c24d2 Dec 05 05:52:04 crc kubenswrapper[4742]: E1205 05:52:04.915686 4742 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="800ms" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.153187 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.154694 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.154752 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.154767 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.154809 4742 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 05:52:05 crc kubenswrapper[4742]: E1205 05:52:05.155462 4742 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.233:6443: connect: connection refused" node="crc" Dec 05 05:52:05 crc kubenswrapper[4742]: W1205 05:52:05.194444 4742 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 05:52:05 crc kubenswrapper[4742]: E1205 05:52:05.194848 4742 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.311271 4742 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 
38.102.83.233:6443: connect: connection refused Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.388809 4742 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d" exitCode=0 Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.388906 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d"} Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.389195 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a4fb54d6380af93c4e8a308540c2eef55adf849fbc934c24f7fde50f62b50b2a"} Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.389400 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.390868 4742 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f" exitCode=0 Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.390922 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f"} Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.390953 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f0938a1326929b8518101cb3cd4700da5292f83669d0c2de1c80217ad4987076"} Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.391084 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.391520 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.391560 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.391576 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.393076 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.393136 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.393169 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.395509 4742 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea" exitCode=0 Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.395593 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea"} Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.395628 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"6db473e8eda63a743f16dbd8bfb4b0f1f00aab5d170563a158fad94ae92f35b2"} Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.395843 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.396943 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.397801 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.397830 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.397843 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.397895 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.397926 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.397942 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.398701 4742 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114" exitCode=0 Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.398783 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114"} Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.398813 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e0621c079890548f59a91646eddc66bfe3587a97960a33b1370957d83c2b4866"} Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.398897 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.400186 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.400230 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.400251 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.401020 4742 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb"} Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.401050 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d4272d05d8a0a63b555365c062efe0c9927f72b103acb69b5c6b3ab9465c24d2"} Dec 05 05:52:05 crc kubenswrapper[4742]: W1205 05:52:05.606160 4742 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 05:52:05 crc kubenswrapper[4742]: E1205 05:52:05.606261 4742 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError" Dec 05 05:52:05 crc kubenswrapper[4742]: E1205 05:52:05.717578 4742 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="1.6s" Dec 05 05:52:05 crc kubenswrapper[4742]: W1205 05:52:05.812356 4742 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 05:52:05 crc kubenswrapper[4742]: E1205 05:52:05.812460 4742 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError" Dec 05 05:52:05 crc kubenswrapper[4742]: W1205 05:52:05.948496 4742 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 05:52:05 crc kubenswrapper[4742]: E1205 05:52:05.948619 4742 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.956063 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.964100 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.964240 4742 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.964278 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:05 crc kubenswrapper[4742]: I1205 05:52:05.964337 4742 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 05:52:05 crc kubenswrapper[4742]: E1205 05:52:05.965851 4742 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.233:6443: connect: connection refused" node="crc" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.310860 4742 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.411159 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61"} Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.411281 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6"} Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.411296 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b"} Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.411308 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3"} Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.413018 4742 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98" exitCode=0 Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.413114 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98"} Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.413308 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.414111 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.414136 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.414144 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.416037 4742 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"7081c0c1a2a04736b851b2891cf22d96332e3361d93479f3fae43034a9fff212"} Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.416118 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.418321 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"13e953bfbd6033682f3959815eedf4f814d275be3391564618723c3491faee3b"} Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.418363 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c03a8d713a4b0a06c47eb28b20328d66f0f3475b56a16ded6f429dd6648e13a7"} Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.418374 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"f54b5f75e10a5fd9f43eec7433614b23bef72beb32ff9028155852c09d9b2e7a"} Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.418465 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.418662 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.418679 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.418688 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.419180 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.419192 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.419234 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.422803 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342"} Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.422840 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601"} Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.422866 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588"} Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.422968 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.426488 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.426524 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:06 crc kubenswrapper[4742]: I1205 05:52:06.426534 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.430627 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765"} Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.430718 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.432356 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.432412 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.432429 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.433526 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70"} Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.433518 4742 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70" exitCode=0 Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.433719 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.433817 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.435159 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.435242 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.435265 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.436014 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.436268 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.436302 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.566801 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.568548 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.568588 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.568600 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.568633 4742 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 05:52:07 crc kubenswrapper[4742]: I1205 05:52:07.598382 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:08 crc kubenswrapper[4742]: I1205 05:52:08.200579 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:08 crc kubenswrapper[4742]: I1205 05:52:08.440442 4742 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 05:52:08 crc kubenswrapper[4742]: I1205 05:52:08.440520 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:08 crc kubenswrapper[4742]: I1205 05:52:08.441316 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb"} Dec 05 05:52:08 crc kubenswrapper[4742]: I1205 05:52:08.441363 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71"} Dec 05 05:52:08 crc kubenswrapper[4742]: I1205 05:52:08.441383 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb"} Dec 05 05:52:08 crc kubenswrapper[4742]: I1205 05:52:08.441502 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:08 crc kubenswrapper[4742]: I1205 05:52:08.442157 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:08 crc kubenswrapper[4742]: I1205 05:52:08.442208 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:08 crc kubenswrapper[4742]: I1205 05:52:08.442221 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:08 crc kubenswrapper[4742]: I1205 05:52:08.442499 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:08 crc kubenswrapper[4742]: I1205 05:52:08.442543 4742 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:08 crc kubenswrapper[4742]: I1205 05:52:08.442556 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:08 crc kubenswrapper[4742]: I1205 05:52:08.642446 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.451972 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b"} Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.452045 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.452083 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117"} Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.452093 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.453752 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.453826 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.453853 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.454492 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.454554 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.454579 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.936461 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.936663 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.937885 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.937952 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:09 crc kubenswrapper[4742]: I1205 05:52:09.937962 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.036757 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.339573 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.455463 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.455528 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.457330 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.457438 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.457471 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.457363 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.457542 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.457565 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.499655 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.500100 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.501903 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.501958 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:10 crc kubenswrapper[4742]: I1205 05:52:10.501976 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:11 crc kubenswrapper[4742]: I1205 05:52:11.457753 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:11 crc kubenswrapper[4742]: I1205 05:52:11.457840 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:11 crc kubenswrapper[4742]: I1205 05:52:11.458784 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:11 crc kubenswrapper[4742]: I1205 05:52:11.458838 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:11 crc kubenswrapper[4742]: I1205 05:52:11.458860 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:11 crc kubenswrapper[4742]: I1205 05:52:11.458964 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:11 crc kubenswrapper[4742]: I1205 05:52:11.458983 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:11 crc kubenswrapper[4742]: 
I1205 05:52:11.458992 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:13 crc kubenswrapper[4742]: I1205 05:52:13.500260 4742 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 05:52:13 crc kubenswrapper[4742]: I1205 05:52:13.501231 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 05:52:14 crc kubenswrapper[4742]: I1205 05:52:14.122388 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:14 crc kubenswrapper[4742]: I1205 05:52:14.122614 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:14 crc kubenswrapper[4742]: I1205 05:52:14.124488 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:14 crc kubenswrapper[4742]: I1205 05:52:14.124582 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:14 crc kubenswrapper[4742]: I1205 05:52:14.124596 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:14 crc kubenswrapper[4742]: I1205 05:52:14.133745 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:14 crc kubenswrapper[4742]: E1205 05:52:14.453913 4742 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 05:52:14 crc kubenswrapper[4742]: I1205 05:52:14.467036 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:14 crc kubenswrapper[4742]: I1205 05:52:14.469132 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:14 crc kubenswrapper[4742]: I1205 05:52:14.469210 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:14 crc kubenswrapper[4742]: I1205 05:52:14.469231 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:15 crc kubenswrapper[4742]: I1205 05:52:15.016749 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 05 05:52:15 crc kubenswrapper[4742]: I1205 05:52:15.016986 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:15 crc kubenswrapper[4742]: I1205 05:52:15.018640 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:15 crc kubenswrapper[4742]: I1205 05:52:15.018702 4742 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:15 crc kubenswrapper[4742]: I1205 05:52:15.018728 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:15 crc kubenswrapper[4742]: I1205 05:52:15.483667 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:52:15 crc kubenswrapper[4742]: I1205 05:52:15.483940 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:15 crc kubenswrapper[4742]: I1205 05:52:15.485553 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:15 crc kubenswrapper[4742]: I1205 05:52:15.485658 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:15 crc kubenswrapper[4742]: I1205 05:52:15.485689 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:16 crc kubenswrapper[4742]: E1205 05:52:16.798941 4742 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": net/http: TLS handshake timeout" event="&Event{ObjectMeta:{crc.187e3bdcfd9e01e8 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 05:52:04.309647848 +0000 UTC m=+0.221782910,LastTimestamp:2025-12-05 05:52:04.309647848 +0000 UTC m=+0.221782910,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 05:52:17 crc kubenswrapper[4742]: I1205 05:52:17.312125 4742 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 05 05:52:17 crc kubenswrapper[4742]: E1205 05:52:17.318461 4742 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 05 05:52:17 crc kubenswrapper[4742]: I1205 05:52:17.487598 4742 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 05 05:52:17 crc kubenswrapper[4742]: I1205 05:52:17.488014 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 05:52:17 crc kubenswrapper[4742]: I1205 05:52:17.519202 4742 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup 
probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 05 05:52:17 crc kubenswrapper[4742]: I1205 05:52:17.519277 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 05:52:19 crc kubenswrapper[4742]: I1205 05:52:19.941870 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:19 crc kubenswrapper[4742]: I1205 05:52:19.942471 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:19 crc kubenswrapper[4742]: I1205 05:52:19.943579 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:19 crc kubenswrapper[4742]: I1205 05:52:19.943630 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:19 crc kubenswrapper[4742]: I1205 05:52:19.943642 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:20 crc kubenswrapper[4742]: I1205 05:52:20.346308 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:20 crc kubenswrapper[4742]: I1205 05:52:20.346475 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:20 crc kubenswrapper[4742]: I1205 05:52:20.347477 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:20 crc kubenswrapper[4742]: I1205 05:52:20.347507 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:20 crc kubenswrapper[4742]: I1205 05:52:20.347516 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:20 crc kubenswrapper[4742]: I1205 05:52:20.351433 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:20 crc kubenswrapper[4742]: I1205 05:52:20.481542 4742 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 05:52:20 crc kubenswrapper[4742]: I1205 05:52:20.481587 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 05:52:20 crc kubenswrapper[4742]: I1205 05:52:20.482406 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:20 crc kubenswrapper[4742]: I1205 05:52:20.482467 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:20 crc kubenswrapper[4742]: I1205 05:52:20.482486 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.504222 4742 trace.go:236] Trace[1771000576]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 05:52:08.733) (total 
time: 13770ms): Dec 05 05:52:22 crc kubenswrapper[4742]: Trace[1771000576]: ---"Objects listed" error: 13770ms (05:52:22.504) Dec 05 05:52:22 crc kubenswrapper[4742]: Trace[1771000576]: [13.770695716s] [13.770695716s] END Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.504270 4742 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.506578 4742 trace.go:236] Trace[1607387273]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 05:52:07.939) (total time: 14566ms): Dec 05 05:52:22 crc kubenswrapper[4742]: Trace[1607387273]: ---"Objects listed" error: 14566ms (05:52:22.506) Dec 05 05:52:22 crc kubenswrapper[4742]: Trace[1607387273]: [14.566588976s] [14.566588976s] END Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.506603 4742 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.507051 4742 trace.go:236] Trace[1904381720]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 05:52:08.913) (total time: 13593ms): Dec 05 05:52:22 crc kubenswrapper[4742]: Trace[1904381720]: ---"Objects listed" error: 13593ms (05:52:22.507) Dec 05 05:52:22 crc kubenswrapper[4742]: Trace[1904381720]: [13.593249073s] [13.593249073s] END Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.507087 4742 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 05:52:22 crc kubenswrapper[4742]: E1205 05:52:22.724731 4742 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.725521 4742 trace.go:236] Trace[2012755450]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 05:52:08.088) (total time: 14636ms): Dec 05 05:52:22 crc kubenswrapper[4742]: Trace[2012755450]: ---"Objects listed" error: 14636ms (05:52:22.725) Dec 05 05:52:22 crc kubenswrapper[4742]: Trace[2012755450]: [14.636566649s] [14.636566649s] END Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.725569 4742 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.726778 4742 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.763978 4742 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:51308->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.764000 4742 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:51322->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.764044 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:51308->192.168.126.11:17697: read: connection reset by peer" Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.764077 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:51322->192.168.126.11:17697: read: connection reset by peer" Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.764615 4742 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 05:52:22 crc kubenswrapper[4742]: I1205 05:52:22.764642 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.311362 4742 apiserver.go:52] "Watching apiserver" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.317793 4742 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.318011 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-wh7m2","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.318288 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.318352 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.318362 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.318362 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.318553 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.318605 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.318672 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.318674 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.318686 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.318879 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-wh7m2" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.320375 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.320698 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.326590 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.329370 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.329579 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.332553 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.333738 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.333752 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.333763 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.334991 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.335078 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.335217 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.384963 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.396697 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.407416 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.414680 4742 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.420497 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430230 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430290 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430322 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430342 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430360 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430378 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430393 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430408 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430424 4742 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430443 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430462 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430479 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430494 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430512 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430666 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430709 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430689 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430727 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430744 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430699 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430728 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430693 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430762 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.430847 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:52:23.930825384 +0000 UTC m=+19.842960506 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430839 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430879 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430853 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430910 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430919 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430964 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.430991 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431021 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431047 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431105 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431128 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431146 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431161 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431162 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431186 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431213 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431236 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431252 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431259 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431299 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431318 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431343 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431359 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431359 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431376 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431393 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431411 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431426 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431442 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431458 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431473 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431487 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431503 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431520 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod 
\"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431537 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431553 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431567 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431586 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431603 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431617 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431631 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431646 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431662 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431676 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod 
\"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431691 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431706 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431721 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431736 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431750 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431766 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431780 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431796 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431812 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431827 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431843 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431858 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431872 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431887 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431901 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431940 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431974 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431988 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432002 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432017 4742 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432030 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432044 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432073 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432087 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432106 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432122 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432138 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432153 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432166 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432183 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432198 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432213 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432228 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432242 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432274 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432290 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432306 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432321 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432336 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432353 4742 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432368 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432384 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432400 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432415 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432430 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432446 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432462 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432478 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432493 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 05:52:23 crc kubenswrapper[4742]: 
I1205 05:52:23.432508 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432523 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432540 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432555 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432570 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432586 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432602 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432617 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432632 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432649 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: 
\"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432664 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432680 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432696 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432712 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432729 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432744 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432762 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432779 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432796 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432811 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432826 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432841 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432862 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432876 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432892 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432908 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432924 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432939 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432955 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432969 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: 
\"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432986 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433001 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433016 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433032 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433049 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433082 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433101 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433117 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433133 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433148 4742 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433163 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433181 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433197 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433250 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433267 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433288 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433311 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433332 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433353 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433376 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433593 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433615 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433637 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433659 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433677 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433694 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433719 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433737 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433754 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433771 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433787 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433803 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433819 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433835 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433853 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433889 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433905 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433922 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433938 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433956 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433974 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433990 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434007 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434023 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434039 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434055 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434095 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434121 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434141 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434158 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434174 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434194 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434211 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434228 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434246 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434264 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434280 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434298 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434315 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 05:52:23 crc 
kubenswrapper[4742]: I1205 05:52:23.434331 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434348 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434390 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/56d21615-e900-43cf-9aa3-753144dbf53f-hosts-file\") pod \"node-resolver-wh7m2\" (UID: \"56d21615-e900-43cf-9aa3-753144dbf53f\") " pod="openshift-dns/node-resolver-wh7m2" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434430 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434457 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434479 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434502 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434520 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434540 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: 
\"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434559 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434577 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434613 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dn57b\" (UniqueName: \"kubernetes.io/projected/56d21615-e900-43cf-9aa3-753144dbf53f-kube-api-access-dn57b\") pod \"node-resolver-wh7m2\" (UID: \"56d21615-e900-43cf-9aa3-753144dbf53f\") " pod="openshift-dns/node-resolver-wh7m2" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434632 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434652 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434669 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434689 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434707 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434727 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434777 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434789 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434805 4742 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434816 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434825 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434836 4742 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434846 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434856 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434866 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434876 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434886 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.438529 4742 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.440829 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.441548 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.448769 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.457311 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.468704 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431422 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431491 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.469953 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431544 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431583 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431660 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431723 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.431828 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432007 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432179 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432334 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). 
InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432474 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432532 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432580 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432846 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432863 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432965 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.432990 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433171 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433179 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433248 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433595 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433613 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433690 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433782 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.433905 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434051 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434112 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434200 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434294 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434348 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434380 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434481 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434546 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434627 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434668 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434701 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434771 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434928 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.434956 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.435089 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). 
InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.435276 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.435286 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.435344 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.435484 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.435548 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.435656 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.435765 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.435811 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.435976 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.436005 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.436106 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.436191 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.436251 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.436323 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.437252 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.437313 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.437336 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.437389 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.437555 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.437639 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.470316 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.437665 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.437772 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). 
InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.469944 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.437787 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.437807 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.437859 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.438627 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.439080 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.470447 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.437756 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.439314 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.439519 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.439528 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.439615 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.439816 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.439910 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.439959 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.439868 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.470561 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.440156 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.440392 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.440482 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.440834 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.440963 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). 
InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.441003 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.441230 4742 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.441273 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.441290 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.441410 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.441658 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.441815 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.441757 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.441890 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.442109 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.442185 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.442697 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.442824 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.443003 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.443210 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.443410 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.443747 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.443858 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.443873 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.444043 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.444090 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.444294 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.444312 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.444394 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.444599 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.444598 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.444992 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.445085 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.445242 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.445273 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.445526 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.445588 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.445601 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.445783 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.445927 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.446001 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.450642 4742 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.464350 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.464409 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.464577 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.465672 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.465833 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.465986 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.467277 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.468272 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.468292 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.467946 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.468680 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.469005 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.469249 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.469257 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.469718 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.469773 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.470201 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.439228 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.470666 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.470916 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.470930 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:23.970814169 +0000 UTC m=+19.882949231 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.470926 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.470963 4742 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.470993 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.471004 4742 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.471020 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:23.971003394 +0000 UTC m=+19.883138546 (durationBeforeRetry 500ms). 
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.470760 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.471130 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.471158 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:23.971147788 +0000 UTC m=+19.883282920 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.471196 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:23.971188489 +0000 UTC m=+19.883323661 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.471214 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.471312 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.471317 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.471433 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.471628 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.471713 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.471873 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.471907 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.472036 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.472189 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.472593 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.472737 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.472915 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.473095 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.473174 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.473353 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.473427 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.473662 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.475078 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.476951 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.477118 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.476974 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.477228 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.477133 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.477579 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.478634 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.478752 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.479041 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.480444 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.480607 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.480731 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.481358 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.481669 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.483910 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.487438 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " 
pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.488358 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.489164 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.489214 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.489380 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.489771 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.490459 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.490628 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.490720 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.490787 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.490884 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.491754 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.491772 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.501869 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.506651 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.508308 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.517969 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.519942 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.527597 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.528712 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"]
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.532511 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536120 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536166 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dn57b\" (UniqueName: \"kubernetes.io/projected/56d21615-e900-43cf-9aa3-753144dbf53f-kube-api-access-dn57b\") pod \"node-resolver-wh7m2\" (UID: \"56d21615-e900-43cf-9aa3-753144dbf53f\") " pod="openshift-dns/node-resolver-wh7m2"
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536221 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/56d21615-e900-43cf-9aa3-753144dbf53f-hosts-file\") pod \"node-resolver-wh7m2\" (UID: \"56d21615-e900-43cf-9aa3-753144dbf53f\") " pod="openshift-dns/node-resolver-wh7m2"
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536255 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536313 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536328 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536340 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536351 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536362 4742 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536373 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536383 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536395 4742 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536406 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536416 4742 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536426 4742 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536437 4742 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536449 4742 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536461 4742 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536472 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536483 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536496 4742 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536507 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536521 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536532 4742 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536544 4742 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536555 4742 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536566 4742 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536578 4742 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536588 4742 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536599 4742 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536610 4742 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536622 4742 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536634 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536645 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536657 4742 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536666 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536668 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536709 4742 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536720 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536737 4742 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536748 4742 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536757 4742 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536766 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536774 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536783 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536792 4742 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536800 4742 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536809 4742 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536819 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536827 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536836 4742 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536845 4742 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536853 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536862 4742 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536871 4742 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536880 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536888 4742 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536898 4742 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536907 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536918 4742 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536928 4742 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536936 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536944 4742 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536954 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536962 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536971 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536980 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536989 4742 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536998 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537006 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537015 4742 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537024 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537032 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537041 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537049 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537075 4742 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537083 4742 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537091 4742 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537100 4742 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\""
Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537109 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\""
\"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537117 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537125 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537134 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537142 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537151 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537159 4742 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537169 4742 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537177 4742 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537187 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537196 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537204 4742 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537213 4742 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537222 4742 reconciler_common.go:293] "Volume detached for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537230 4742 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536645 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/56d21615-e900-43cf-9aa3-753144dbf53f-hosts-file\") pod \"node-resolver-wh7m2\" (UID: \"56d21615-e900-43cf-9aa3-753144dbf53f\") " pod="openshift-dns/node-resolver-wh7m2" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537238 4742 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537280 4742 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537298 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537308 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537316 4742 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537324 4742 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537333 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537341 4742 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537349 4742 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537364 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 
05:52:23.537372 4742 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537380 4742 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537388 4742 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537396 4742 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537405 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537413 4742 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537421 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537428 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537436 4742 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537444 4742 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537453 4742 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537461 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537471 4742 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 
crc kubenswrapper[4742]: I1205 05:52:23.537479 4742 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537487 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537495 4742 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537503 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537511 4742 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537521 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537532 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537544 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537557 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537569 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537579 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537598 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537609 4742 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc 
kubenswrapper[4742]: I1205 05:52:23.537621 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537642 4742 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537654 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537665 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537676 4742 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537689 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537701 4742 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537712 4742 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537724 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537735 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537747 4742 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537759 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537771 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" 
DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537782 4742 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537794 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537805 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537817 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537828 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537852 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537864 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537875 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537888 4742 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537898 4742 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537906 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537914 4742 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537922 4742 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537930 4742 reconciler_common.go:293] 
"Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537940 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537948 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537956 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537965 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537974 4742 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537981 4742 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537990 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.537998 4742 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538010 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538018 4742 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538026 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538033 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538042 4742 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" 
(UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538071 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538083 4742 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538093 4742 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538101 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538110 4742 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538121 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538130 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538139 4742 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538147 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538155 4742 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538163 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538171 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538178 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538186 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538195 4742 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538203 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538211 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.538219 4742 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.536608 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.545337 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.551458 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dn57b\" (UniqueName: \"kubernetes.io/projected/56d21615-e900-43cf-9aa3-753144dbf53f-kube-api-access-dn57b\") pod \"node-resolver-wh7m2\" (UID: \"56d21615-e900-43cf-9aa3-753144dbf53f\") " pod="openshift-dns/node-resolver-wh7m2" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.554606 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.560673 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.569682 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.579726 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.587485 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.597643 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.598323 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-7q8lw"] Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.598810 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.599024 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-2gbwd"] Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.599709 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-776bt"] Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.599841 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.599965 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.600049 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.600617 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.600629 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.601018 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.601307 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.601326 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.601332 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.601635 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.601750 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.602345 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.602989 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.604136 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.608924 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.617355 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.628453 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.636366 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.637382 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638575 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3fc0b032-e995-4d0f-b5e7-600b880849f5-rootfs\") pod \"machine-config-daemon-7q8lw\" (UID: \"3fc0b032-e995-4d0f-b5e7-600b880849f5\") " pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638602 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-multus-cni-dir\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638618 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/130d4974-9fb6-4cdb-b115-56d2a96b1438-cni-binary-copy\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638641 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-cnibin\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638656 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-run-k8s-cni-cncf-io\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638671 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/130d4974-9fb6-4cdb-b115-56d2a96b1438-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638693 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-var-lib-cni-bin\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638720 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-run-netns\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638734 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-multus-conf-dir\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638748 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/130d4974-9fb6-4cdb-b115-56d2a96b1438-os-release\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638769 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/39641a18-5d13-441f-9956-3777b9f27703-multus-daemon-config\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638783 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/130d4974-9fb6-4cdb-b115-56d2a96b1438-cnibin\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638800 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsm25\" (UniqueName: \"kubernetes.io/projected/3fc0b032-e995-4d0f-b5e7-600b880849f5-kube-api-access-rsm25\") pod \"machine-config-daemon-7q8lw\" (UID: \"3fc0b032-e995-4d0f-b5e7-600b880849f5\") " pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638816 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-var-lib-kubelet\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638829 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-hostroot\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638844 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-run-multus-certs\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638858 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-etc-kubernetes\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638871 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/130d4974-9fb6-4cdb-b115-56d2a96b1438-system-cni-dir\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638885 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcqvd\" (UniqueName: \"kubernetes.io/projected/39641a18-5d13-441f-9956-3777b9f27703-kube-api-access-gcqvd\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638911 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3fc0b032-e995-4d0f-b5e7-600b880849f5-proxy-tls\") pod \"machine-config-daemon-7q8lw\" (UID: \"3fc0b032-e995-4d0f-b5e7-600b880849f5\") " pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.638950 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3fc0b032-e995-4d0f-b5e7-600b880849f5-mcd-auth-proxy-config\") pod \"machine-config-daemon-7q8lw\" (UID: \"3fc0b032-e995-4d0f-b5e7-600b880849f5\") " pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.639010 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/39641a18-5d13-441f-9956-3777b9f27703-cni-binary-copy\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.639031 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-system-cni-dir\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.639056 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-multus-socket-dir-parent\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.639097 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvh2f\" (UniqueName: \"kubernetes.io/projected/130d4974-9fb6-4cdb-b115-56d2a96b1438-kube-api-access-vvh2f\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.639120 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-var-lib-cni-multus\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " 
pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.639145 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-os-release\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.639163 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/130d4974-9fb6-4cdb-b115-56d2a96b1438-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.644238 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.644471 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 05:52:23 crc kubenswrapper[4742]: W1205 05:52:23.654516 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-fb6299a052156a71a549e27010e12077ce583d7c8675b7fedd5a063b70e751d2 WatchSource:0}: Error finding container fb6299a052156a71a549e27010e12077ce583d7c8675b7fedd5a063b70e751d2: Status 404 returned error can't find the container with id fb6299a052156a71a549e27010e12077ce583d7c8675b7fedd5a063b70e751d2 Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.654734 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.655032 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{
\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.665659 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-wh7m2" Dec 05 05:52:23 crc kubenswrapper[4742]: W1205 05:52:23.672837 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-9597c49a4bce912d10e7f4813031cb4df0d949d60dd1165f007a10aba0df6147 WatchSource:0}: Error finding container 9597c49a4bce912d10e7f4813031cb4df0d949d60dd1165f007a10aba0df6147: Status 404 returned error can't find the container with id 9597c49a4bce912d10e7f4813031cb4df0d949d60dd1165f007a10aba0df6147 Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.674356 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.700278 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.718375 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.730241 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"9597c49a4bce912d10e7f4813031cb4df0d949d60dd1165f007a10aba0df6147"} Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.735209 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.738961 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"fb6299a052156a71a549e27010e12077ce583d7c8675b7fedd5a063b70e751d2"} Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.739832 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-hostroot\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.739850 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-run-multus-certs\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.739867 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-etc-kubernetes\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 
05:52:23.739881 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/130d4974-9fb6-4cdb-b115-56d2a96b1438-system-cni-dir\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.739896 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsm25\" (UniqueName: \"kubernetes.io/projected/3fc0b032-e995-4d0f-b5e7-600b880849f5-kube-api-access-rsm25\") pod \"machine-config-daemon-7q8lw\" (UID: \"3fc0b032-e995-4d0f-b5e7-600b880849f5\") " pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.739911 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-var-lib-kubelet\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.739924 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcqvd\" (UniqueName: \"kubernetes.io/projected/39641a18-5d13-441f-9956-3777b9f27703-kube-api-access-gcqvd\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.739939 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3fc0b032-e995-4d0f-b5e7-600b880849f5-mcd-auth-proxy-config\") pod \"machine-config-daemon-7q8lw\" (UID: \"3fc0b032-e995-4d0f-b5e7-600b880849f5\") " pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.739954 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/39641a18-5d13-441f-9956-3777b9f27703-cni-binary-copy\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.739975 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3fc0b032-e995-4d0f-b5e7-600b880849f5-proxy-tls\") pod \"machine-config-daemon-7q8lw\" (UID: \"3fc0b032-e995-4d0f-b5e7-600b880849f5\") " pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.739990 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-system-cni-dir\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740005 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-multus-socket-dir-parent\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740019 4742 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvh2f\" (UniqueName: \"kubernetes.io/projected/130d4974-9fb6-4cdb-b115-56d2a96b1438-kube-api-access-vvh2f\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740036 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-var-lib-cni-multus\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740050 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-os-release\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740084 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/130d4974-9fb6-4cdb-b115-56d2a96b1438-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740099 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-multus-cni-dir\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740114 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/130d4974-9fb6-4cdb-b115-56d2a96b1438-cni-binary-copy\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740136 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3fc0b032-e995-4d0f-b5e7-600b880849f5-rootfs\") pod \"machine-config-daemon-7q8lw\" (UID: \"3fc0b032-e995-4d0f-b5e7-600b880849f5\") " pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740150 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-cnibin\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740165 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/130d4974-9fb6-4cdb-b115-56d2a96b1438-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740179 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-run-k8s-cni-cncf-io\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740192 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-var-lib-cni-bin\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740211 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-run-netns\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740234 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-multus-conf-dir\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740248 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/130d4974-9fb6-4cdb-b115-56d2a96b1438-os-release\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740264 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/130d4974-9fb6-4cdb-b115-56d2a96b1438-cnibin\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740286 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/39641a18-5d13-441f-9956-3777b9f27703-multus-daemon-config\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740552 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-var-lib-cni-multus\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740595 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-etc-kubernetes\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740644 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-hostroot\") pod 
\"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740669 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-run-multus-certs\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740779 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/130d4974-9fb6-4cdb-b115-56d2a96b1438-system-cni-dir\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740844 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-system-cni-dir\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740931 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-var-lib-kubelet\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.740939 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/39641a18-5d13-441f-9956-3777b9f27703-multus-daemon-config\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741004 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-os-release\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741084 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-multus-socket-dir-parent\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741146 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3fc0b032-e995-4d0f-b5e7-600b880849f5-mcd-auth-proxy-config\") pod \"machine-config-daemon-7q8lw\" (UID: \"3fc0b032-e995-4d0f-b5e7-600b880849f5\") " pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741353 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/39641a18-5d13-441f-9956-3777b9f27703-cni-binary-copy\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741386 4742 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-run-k8s-cni-cncf-io\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741393 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3fc0b032-e995-4d0f-b5e7-600b880849f5-rootfs\") pod \"machine-config-daemon-7q8lw\" (UID: \"3fc0b032-e995-4d0f-b5e7-600b880849f5\") " pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741426 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-cnibin\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741510 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-multus-cni-dir\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741510 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/130d4974-9fb6-4cdb-b115-56d2a96b1438-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741536 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-var-lib-cni-bin\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741565 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-host-run-netns\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741587 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/39641a18-5d13-441f-9956-3777b9f27703-multus-conf-dir\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741627 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/130d4974-9fb6-4cdb-b115-56d2a96b1438-os-release\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741650 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/130d4974-9fb6-4cdb-b115-56d2a96b1438-cnibin\") pod 
\"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741662 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/130d4974-9fb6-4cdb-b115-56d2a96b1438-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.741885 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/130d4974-9fb6-4cdb-b115-56d2a96b1438-cni-binary-copy\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.742497 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"1f4f4994d940a66b13b795016edc22eaab13ad49334f990bb70ee51048262452"} Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.746921 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3fc0b032-e995-4d0f-b5e7-600b880849f5-proxy-tls\") pod \"machine-config-daemon-7q8lw\" (UID: \"3fc0b032-e995-4d0f-b5e7-600b880849f5\") " pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.750542 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.752989 4742 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765" exitCode=255 Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.753466 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765"} Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.754876 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.759630 4742 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.762544 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsm25\" (UniqueName: \"kubernetes.io/projected/3fc0b032-e995-4d0f-b5e7-600b880849f5-kube-api-access-rsm25\") pod \"machine-config-daemon-7q8lw\" (UID: \"3fc0b032-e995-4d0f-b5e7-600b880849f5\") " pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.762544 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcqvd\" (UniqueName: \"kubernetes.io/projected/39641a18-5d13-441f-9956-3777b9f27703-kube-api-access-gcqvd\") pod \"multus-776bt\" (UID: \"39641a18-5d13-441f-9956-3777b9f27703\") " pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.762622 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvh2f\" (UniqueName: \"kubernetes.io/projected/130d4974-9fb6-4cdb-b115-56d2a96b1438-kube-api-access-vvh2f\") pod \"multus-additional-cni-plugins-2gbwd\" (UID: \"130d4974-9fb6-4cdb-b115-56d2a96b1438\") " pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.764851 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.777960 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.788024 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.804326 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.818913 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.827674 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.830736 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.831884 4742 scope.go:117] "RemoveContainer" containerID="62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.839608 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.857501 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.866940 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.876324 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.885893 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.904518 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.912509 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.919452 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.919963 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.927460 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-776bt" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.936922 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.1
1\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.941797 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:52:23 crc kubenswrapper[4742]: E1205 05:52:23.941965 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:52:24.941950253 +0000 UTC m=+20.854085315 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:52:23 crc kubenswrapper[4742]: W1205 05:52:23.942360 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fc0b032_e995_4d0f_b5e7_600b880849f5.slice/crio-f037711655b55007b273bc12f4e5e9b4f3f6a6603903bc55328310326550e969 WatchSource:0}: Error finding container f037711655b55007b273bc12f4e5e9b4f3f6a6603903bc55328310326550e969: Status 404 returned error can't find the container with id f037711655b55007b273bc12f4e5e9b4f3f6a6603903bc55328310326550e969 Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.949941 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.983580 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-m9jc4"] Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.984392 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.986342 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.986512 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.986803 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 05:52:23 crc kubenswrapper[4742]: W1205 05:52:23.987046 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod130d4974_9fb6_4cdb_b115_56d2a96b1438.slice/crio-726cf343c4550c472bdc7eba5dcfb2612d4c24f55a04de11a0ea523d62b37a2b WatchSource:0}: Error finding container 726cf343c4550c472bdc7eba5dcfb2612d4c24f55a04de11a0ea523d62b37a2b: Status 404 returned error can't find the container with id 726cf343c4550c472bdc7eba5dcfb2612d4c24f55a04de11a0ea523d62b37a2b Dec 05 05:52:23 crc kubenswrapper[4742]: I1205 05:52:23.994765 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.013840 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.034311 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.042445 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: 
\"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.042617 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b82wp\" (UniqueName: \"kubernetes.io/projected/06ddc689-50f2-409f-9ac8-8f6a1bed0831-kube-api-access-b82wp\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.042788 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-cni-bin\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: E1205 05:52:24.042707 4742 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.042921 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-node-log\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: E1205 05:52:24.043004 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:25.042979961 +0000 UTC m=+20.955115083 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.043147 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.043225 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.043301 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-run-netns\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.043365 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-ovn\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: E1205 05:52:24.043495 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:52:24 crc kubenswrapper[4742]: E1205 05:52:24.043527 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:52:24 crc kubenswrapper[4742]: E1205 05:52:24.043541 4742 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:24 crc kubenswrapper[4742]: E1205 05:52:24.043602 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:25.043577607 +0000 UTC m=+20.955712869 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.043932 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovnkube-script-lib\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.044043 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovnkube-config\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.044207 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-etc-openvswitch\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.044293 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-openvswitch\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.044378 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.044446 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-systemd\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.044513 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovn-node-metrics-cert\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.044576 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-kubelet\") 
pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.044640 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-slash\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.044791 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-log-socket\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.044867 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.044935 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-systemd-units\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: E1205 05:52:24.044680 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:52:24 crc kubenswrapper[4742]: E1205 05:52:24.045220 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:52:24 crc kubenswrapper[4742]: E1205 05:52:24.045250 4742 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:24 crc kubenswrapper[4742]: E1205 05:52:24.045046 4742 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:52:24 crc kubenswrapper[4742]: E1205 05:52:24.045311 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:25.045297013 +0000 UTC m=+20.957432265 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:52:24 crc kubenswrapper[4742]: E1205 05:52:24.045333 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:25.045323224 +0000 UTC m=+20.957458516 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.045133 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-var-lib-openvswitch\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.045562 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-env-overrides\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.045636 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-run-ovn-kubernetes\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.045700 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-cni-netd\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.057383 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.100032 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.131790 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.146757 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.146805 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-run-netns\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.146821 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-ovn\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.146837 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovnkube-config\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.146851 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovnkube-script-lib\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.146865 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-etc-openvswitch\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.146878 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" 
(UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-openvswitch\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.146892 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-systemd\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.146913 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovn-node-metrics-cert\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.146928 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-kubelet\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.146941 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-slash\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.146954 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-log-socket\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.146968 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-systemd-units\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.146989 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-var-lib-openvswitch\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.147003 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-env-overrides\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.147016 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-run-ovn-kubernetes\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.147032 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-cni-netd\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.147054 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b82wp\" (UniqueName: \"kubernetes.io/projected/06ddc689-50f2-409f-9ac8-8f6a1bed0831-kube-api-access-b82wp\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.147090 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-cni-bin\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.147104 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-node-log\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.147161 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-node-log\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.147194 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.147220 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-run-netns\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.147261 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-ovn\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.147786 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovnkube-config\") pod \"ovnkube-node-m9jc4\" 
(UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.148468 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovnkube-script-lib\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.148501 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-etc-openvswitch\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.148521 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-openvswitch\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.148544 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-systemd\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.149101 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-var-lib-openvswitch\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.149143 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-kubelet\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.149166 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-slash\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.149193 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-log-socket\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.149219 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-systemd-units\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.149273 4742 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-cni-netd\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.149582 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-env-overrides\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.149612 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-run-ovn-kubernetes\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.149725 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-cni-bin\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.155173 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovn-node-metrics-cert\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.165491 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.198571 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b82wp\" (UniqueName: \"kubernetes.io/projected/06ddc689-50f2-409f-9ac8-8f6a1bed0831-kube-api-access-b82wp\") pod \"ovnkube-node-m9jc4\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.236027 4742 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\
\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b1
7b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.263758 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.298479 
4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.299499 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:24 crc kubenswrapper[4742]: W1205 05:52:24.318598 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06ddc689_50f2_409f_9ac8_8f6a1bed0831.slice/crio-61fd67b2c6e7a4d2aa2ef4aecffc4bcd9a0db0ca9c87d712b659ad7f5450e9cd WatchSource:0}: Error finding container 61fd67b2c6e7a4d2aa2ef4aecffc4bcd9a0db0ca9c87d712b659ad7f5450e9cd: Status 404 returned error can't find the container with id 61fd67b2c6e7a4d2aa2ef4aecffc4bcd9a0db0ca9c87d712b659ad7f5450e9cd Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.342906 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.381566 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.385800 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.386489 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.387685 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.388363 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.389407 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.389886 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.390522 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.391432 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.392034 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" 
path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.392952 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.393496 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.394576 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.395099 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.395573 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.399494 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.399980 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.400961 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.401418 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.402033 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.403024 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.403477 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.406117 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.406562 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" 
path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.407826 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.408529 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.409991 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.410656 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.414138 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.414887 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.415988 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.416494 4742 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.416593 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.418716 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.419249 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.419672 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.421457 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.422496 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.423035 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.424023 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.424667 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.425775 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.426350 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.427320 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sh
a256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.428762 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.429620 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.430225 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.430778 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.431535 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.433237 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.433787 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.434311 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.435199 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.435790 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.436799 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.437309 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.473340 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.518608 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.546769 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.584034 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.646455 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container 
could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.662584 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.703917 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.749244 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.758698 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.761098 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.761485 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.772534 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.773273 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.793258 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.794815 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-776bt" event={"ID":"39641a18-5d13-441f-9956-3777b9f27703","Type":"ContainerStarted","Data":"0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.794838 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-776bt" event={"ID":"39641a18-5d13-441f-9956-3777b9f27703","Type":"ContainerStarted","Data":"97b07c129018ab7e43375391037abb7ea74aacb39680da4f7e143662135bca72"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.809346 4742 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.809532 4742 generic.go:334] "Generic (PLEG): container finished" podID="130d4974-9fb6-4cdb-b115-56d2a96b1438" containerID="83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c" exitCode=0 Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.809606 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" event={"ID":"130d4974-9fb6-4cdb-b115-56d2a96b1438","Type":"ContainerDied","Data":"83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.809632 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" event={"ID":"130d4974-9fb6-4cdb-b115-56d2a96b1438","Type":"ContainerStarted","Data":"726cf343c4550c472bdc7eba5dcfb2612d4c24f55a04de11a0ea523d62b37a2b"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.828223 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.828269 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.828280 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"f037711655b55007b273bc12f4e5e9b4f3f6a6603903bc55328310326550e969"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.843851 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-wh7m2" event={"ID":"56d21615-e900-43cf-9aa3-753144dbf53f","Type":"ContainerStarted","Data":"0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.843899 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-wh7m2" 
event={"ID":"56d21615-e900-43cf-9aa3-753144dbf53f","Type":"ContainerStarted","Data":"dfddfad0f2da1eb9d921a3405d52185f07217fa5e5b2fdf3701c9aa80b6014d9"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.856888 4742 generic.go:334] "Generic (PLEG): container finished" podID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerID="4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5" exitCode=0 Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.857382 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerDied","Data":"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.857453 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerStarted","Data":"61fd67b2c6e7a4d2aa2ef4aecffc4bcd9a0db0ca9c87d712b659ad7f5450e9cd"} Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.862560 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.893083 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.912013 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.945018 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.960798 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:52:24 crc kubenswrapper[4742]: E1205 05:52:24.960942 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:52:26.960923404 +0000 UTC m=+22.873058456 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:52:24 crc kubenswrapper[4742]: I1205 05:52:24.990858 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.030649 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.047104 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.062109 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.062165 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.062215 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.062249 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.062309 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.062309 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.062355 4742 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.062365 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.062382 4742 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.062359 4742 
configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.062411 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:27.062394494 +0000 UTC m=+22.974529556 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.062337 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.062429 4742 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.062443 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:27.062423785 +0000 UTC m=+22.974558907 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.062477 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:27.062469766 +0000 UTC m=+22.974604918 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.062496 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:27.062489986 +0000 UTC m=+22.974625148 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.070913 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.072075 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.092154 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.127227 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.170865 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.200554 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.250288 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.284405 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d5214
2d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.325594 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.366185 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.382846 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.382900 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.382976 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.382864 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.383073 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:25 crc kubenswrapper[4742]: E1205 05:52:25.383141 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.403336 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc
/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.447853 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z 
is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.486606 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.524262 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.532046 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-ttdt8"] Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.535011 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-ttdt8" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.553048 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.567023 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rglxc\" (UniqueName: \"kubernetes.io/projected/e578c028-99f7-4a07-91cb-58ff75f25dcd-kube-api-access-rglxc\") pod \"node-ca-ttdt8\" (UID: \"e578c028-99f7-4a07-91cb-58ff75f25dcd\") " pod="openshift-image-registry/node-ca-ttdt8" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.567117 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e578c028-99f7-4a07-91cb-58ff75f25dcd-serviceca\") pod \"node-ca-ttdt8\" (UID: \"e578c028-99f7-4a07-91cb-58ff75f25dcd\") " pod="openshift-image-registry/node-ca-ttdt8" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.567142 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e578c028-99f7-4a07-91cb-58ff75f25dcd-host\") pod \"node-ca-ttdt8\" (UID: \"e578c028-99f7-4a07-91cb-58ff75f25dcd\") " pod="openshift-image-registry/node-ca-ttdt8" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.573824 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.592744 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.613836 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.642944 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.667976 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rglxc\" (UniqueName: \"kubernetes.io/projected/e578c028-99f7-4a07-91cb-58ff75f25dcd-kube-api-access-rglxc\") pod \"node-ca-ttdt8\" (UID: \"e578c028-99f7-4a07-91cb-58ff75f25dcd\") " pod="openshift-image-registry/node-ca-ttdt8"
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.668043 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e578c028-99f7-4a07-91cb-58ff75f25dcd-serviceca\") pod \"node-ca-ttdt8\" (UID: \"e578c028-99f7-4a07-91cb-58ff75f25dcd\") " pod="openshift-image-registry/node-ca-ttdt8"
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.668107 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e578c028-99f7-4a07-91cb-58ff75f25dcd-host\") pod \"node-ca-ttdt8\" (UID: \"e578c028-99f7-4a07-91cb-58ff75f25dcd\") " pod="openshift-image-registry/node-ca-ttdt8"
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.668201 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e578c028-99f7-4a07-91cb-58ff75f25dcd-host\") pod \"node-ca-ttdt8\" (UID: \"e578c028-99f7-4a07-91cb-58ff75f25dcd\") " pod="openshift-image-registry/node-ca-ttdt8"
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.669599 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e578c028-99f7-4a07-91cb-58ff75f25dcd-serviceca\") pod \"node-ca-ttdt8\" (UID: \"e578c028-99f7-4a07-91cb-58ff75f25dcd\") " pod="openshift-image-registry/node-ca-ttdt8"
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.683618 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status:
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.715237 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rglxc\" (UniqueName: \"kubernetes.io/projected/e578c028-99f7-4a07-91cb-58ff75f25dcd-kube-api-access-rglxc\") pod \"node-ca-ttdt8\" (UID: \"e578c028-99f7-4a07-91cb-58ff75f25dcd\") " pod="openshift-image-registry/node-ca-ttdt8" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.744185 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.787512 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469a
b57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.820439 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.860568 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.862742 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerStarted","Data":"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10"}
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.862794 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerStarted","Data":"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409"}
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.862850 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerStarted","Data":"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787"}
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.862861 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerStarted","Data":"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689"}
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.862878 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerStarted","Data":"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d"}
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.862895 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerStarted","Data":"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a"}
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.863940 4742 generic.go:334] "Generic (PLEG): container finished" podID="130d4974-9fb6-4cdb-b115-56d2a96b1438" containerID="37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8" exitCode=0
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.864130 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" event={"ID":"130d4974-9fb6-4cdb-b115-56d2a96b1438","Type":"ContainerDied","Data":"37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8"}
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.902363 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.914013 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-ttdt8"
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.925479 4742 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.929616 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.929647 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.929657 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.929765 4742 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.943518 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status:
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:25Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.994021 4742 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.994269 4742 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.995351 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.995383 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.995393 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 
05:52:25.995448 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:25 crc kubenswrapper[4742]: I1205 05:52:25.995461 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:25Z","lastTransitionTime":"2025-12-05T05:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:26 crc kubenswrapper[4742]: E1205 05:52:26.016929 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 
2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.022680 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.022716 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.022727 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.022742 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.022753 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:26Z","lastTransitionTime":"2025-12-05T05:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.027339 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:26 crc kubenswrapper[4742]: W1205 05:52:26.028666 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode578c028_99f7_4a07_91cb_58ff75f25dcd.slice/crio-c7a0bcd8df20452c7904f081b4b76461f10986f64b807320a8d2ed989b8dc604 WatchSource:0}: Error finding container c7a0bcd8df20452c7904f081b4b76461f10986f64b807320a8d2ed989b8dc604: Status 404 returned error can't find the container with id c7a0bcd8df20452c7904f081b4b76461f10986f64b807320a8d2ed989b8dc604
Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.068200 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877
441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\
"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: E1205 05:52:26.071535 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.075135 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.075186 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.075197 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.075216 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.075227 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:26Z","lastTransitionTime":"2025-12-05T05:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:26 crc kubenswrapper[4742]: E1205 05:52:26.086022 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 
2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: E1205 05:52:26.086156 4742 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.087640 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.087671 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.087682 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.087698 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.087734 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:26Z","lastTransitionTime":"2025-12-05T05:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.102363 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:17
4f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.141766 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.182649 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.189996 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.190045 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.190095 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.190117 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.190132 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:26Z","lastTransitionTime":"2025-12-05T05:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.223164 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.262931 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.292266 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.292304 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.292318 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.292335 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.292348 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:26Z","lastTransitionTime":"2025-12-05T05:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.301612 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.342292 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready 
status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.386926 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.394616 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.394655 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.394667 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.394682 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.394695 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:26Z","lastTransitionTime":"2025-12-05T05:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.424225 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.461349 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.497698 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.497760 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.497779 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.497804 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.497822 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:26Z","lastTransitionTime":"2025-12-05T05:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.504679 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\
\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.547964 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z 
is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.581263 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.599995 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.600032 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.600052 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.600085 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.600096 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:26Z","lastTransitionTime":"2025-12-05T05:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file 
in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.619115 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.659028 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.702043 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.702083 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.702093 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.702105 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.702114 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:26Z","lastTransitionTime":"2025-12-05T05:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.704193 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.742508 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\
":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.781515 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.805114 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.805162 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.805176 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.805193 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.805207 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:26Z","lastTransitionTime":"2025-12-05T05:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.829759 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.861964 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.868402 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-ttdt8" event={"ID":"e578c028-99f7-4a07-91cb-58ff75f25dcd","Type":"ContainerStarted","Data":"06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6"} Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.868454 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-ttdt8" 
event={"ID":"e578c028-99f7-4a07-91cb-58ff75f25dcd","Type":"ContainerStarted","Data":"c7a0bcd8df20452c7904f081b4b76461f10986f64b807320a8d2ed989b8dc604"} Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.869997 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73"} Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.872082 4742 generic.go:334] "Generic (PLEG): container finished" podID="130d4974-9fb6-4cdb-b115-56d2a96b1438" containerID="cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c" exitCode=0 Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.872086 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" event={"ID":"130d4974-9fb6-4cdb-b115-56d2a96b1438","Type":"ContainerDied","Data":"cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c"} Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.903351 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.907490 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.907515 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.907523 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.907535 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.907544 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:26Z","lastTransitionTime":"2025-12-05T05:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.959891 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.979321 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:52:26 crc kubenswrapper[4742]: E1205 05:52:26.979944 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:52:30.979925095 +0000 UTC m=+26.892060157 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:52:26 crc kubenswrapper[4742]: I1205 05:52:26.986276 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b
89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:26Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.011034 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.011103 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.011113 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:27 crc 
kubenswrapper[4742]: I1205 05:52:27.011131 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.011143 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:27Z","lastTransitionTime":"2025-12-05T05:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.020362 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.065257 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.080309 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.080351 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: 
\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.080373 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.080397 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.080418 4742 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.080498 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:31.0804784 +0000 UTC m=+26.992613472 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.080500 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.080530 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.080538 4742 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.080545 4742 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.080578 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:31.080565873 +0000 UTC m=+26.992700935 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.080604 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:31.080595013 +0000 UTC m=+26.992730085 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.080624 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.080635 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.080645 4742 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.080667 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:31.080660815 +0000 UTC m=+26.992795877 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.104190 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-cr
c-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.112709 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.112746 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.112756 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.112768 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.112777 4742 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:27Z","lastTransitionTime":"2025-12-05T05:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.142524 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.181041 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.214338 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.214533 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.214592 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.214649 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.214708 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:27Z","lastTransitionTime":"2025-12-05T05:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.226005 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.259570 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.301652 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.316801 4742 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.316830 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.316837 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.316850 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.316859 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:27Z","lastTransitionTime":"2025-12-05T05:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.343545 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.381936 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.382001 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.382133 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.382307 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.382439 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:27 crc kubenswrapper[4742]: E1205 05:52:27.382660 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.388298 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.419703 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.419939 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.420010 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.420090 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.420155 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:27Z","lastTransitionTime":"2025-12-05T05:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.427552 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.464871 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",
\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.514702 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z 
is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.522326 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.522368 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.522377 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.522391 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.522400 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:27Z","lastTransitionTime":"2025-12-05T05:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.547056 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-
cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.582245 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.619371 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.625609 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.625752 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.625819 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.625892 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.625959 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:27Z","lastTransitionTime":"2025-12-05T05:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.729212 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.729283 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.729309 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.729346 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.729371 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:27Z","lastTransitionTime":"2025-12-05T05:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.837015 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.837392 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.837557 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.837703 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.837859 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:27Z","lastTransitionTime":"2025-12-05T05:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.878787 4742 generic.go:334] "Generic (PLEG): container finished" podID="130d4974-9fb6-4cdb-b115-56d2a96b1438" containerID="3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d" exitCode=0 Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.878893 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" event={"ID":"130d4974-9fb6-4cdb-b115-56d2a96b1438","Type":"ContainerDied","Data":"3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d"} Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.909993 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a57
8bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.932457 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.943276 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.943345 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.943380 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.943399 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.943412 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:27Z","lastTransitionTime":"2025-12-05T05:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.944730 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.956437 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.967485 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:27 crc kubenswrapper[4742]: I1205 05:52:27.982089 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.003606 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.025016 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.046199 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.046259 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.046273 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.046294 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.046305 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:28Z","lastTransitionTime":"2025-12-05T05:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.053771 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad3
38ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.063707 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"19
2.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.075941 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\
"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.098408 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.164106 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.164163 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.164178 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.164201 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.164215 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:28Z","lastTransitionTime":"2025-12-05T05:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.182284 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef4999
5fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.233107 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.245227 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.266412 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.266442 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.266450 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.266464 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.266473 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:28Z","lastTransitionTime":"2025-12-05T05:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.369669 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.369732 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.369756 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.369783 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.369804 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:28Z","lastTransitionTime":"2025-12-05T05:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.472869 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.472909 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.472922 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.472939 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.472952 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:28Z","lastTransitionTime":"2025-12-05T05:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.574468 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.574705 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.574713 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.574726 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.574736 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:28Z","lastTransitionTime":"2025-12-05T05:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.676807 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.676849 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.676872 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.676888 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.676899 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:28Z","lastTransitionTime":"2025-12-05T05:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.790394 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.790455 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.790473 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.790788 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.790835 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:28Z","lastTransitionTime":"2025-12-05T05:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.888611 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerStarted","Data":"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1"} Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.891948 4742 generic.go:334] "Generic (PLEG): container finished" podID="130d4974-9fb6-4cdb-b115-56d2a96b1438" containerID="5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7" exitCode=0 Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.891983 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" event={"ID":"130d4974-9fb6-4cdb-b115-56d2a96b1438","Type":"ContainerDied","Data":"5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7"} Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.893317 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.893391 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.893403 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.893417 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.893428 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:28Z","lastTransitionTime":"2025-12-05T05:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.915827 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.935196 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.951738 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.963119 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.974400 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:28 crc kubenswrapper[4742]: I1205 05:52:28.989033 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.000456 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:28Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.004475 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.004506 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.004517 4742 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.004534 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.004547 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:29Z","lastTransitionTime":"2025-12-05T05:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.011912 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\
\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:29Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.030363 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:29Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.042701 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:29Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.056728 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:29Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.071737 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:29Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.082237 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\
" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:29Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.094050 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:29Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.102578 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:29Z is after 2025-08-24T17:21:41Z" Dec 05 
05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.106128 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.106165 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.106176 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.106222 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.106233 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:29Z","lastTransitionTime":"2025-12-05T05:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.209111 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.209176 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.209195 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.209220 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.209238 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:29Z","lastTransitionTime":"2025-12-05T05:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.311880 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.311919 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.311928 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.311942 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.311951 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:29Z","lastTransitionTime":"2025-12-05T05:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.382170 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.382170 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.382337 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:29 crc kubenswrapper[4742]: E1205 05:52:29.382537 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:29 crc kubenswrapper[4742]: E1205 05:52:29.382670 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:29 crc kubenswrapper[4742]: E1205 05:52:29.382838 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.415772 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.415835 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.415874 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.415905 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.415927 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:29Z","lastTransitionTime":"2025-12-05T05:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.519319 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.519391 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.519409 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.519441 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.519460 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:29Z","lastTransitionTime":"2025-12-05T05:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.621736 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.621787 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.621798 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.621816 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.621829 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:29Z","lastTransitionTime":"2025-12-05T05:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.724716 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.724765 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.724780 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.724802 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.724821 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:29Z","lastTransitionTime":"2025-12-05T05:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.827994 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.828052 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.828107 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.828132 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.828149 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:29Z","lastTransitionTime":"2025-12-05T05:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.899869 4742 generic.go:334] "Generic (PLEG): container finished" podID="130d4974-9fb6-4cdb-b115-56d2a96b1438" containerID="6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe" exitCode=0 Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.899929 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" event={"ID":"130d4974-9fb6-4cdb-b115-56d2a96b1438","Type":"ContainerDied","Data":"6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe"} Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.924922 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:29Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.931008 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.931089 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.931106 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.931130 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.931149 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:29Z","lastTransitionTime":"2025-12-05T05:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.946765 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:29Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.957127 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:29Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.977913 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469a
b57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:29Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:29 crc kubenswrapper[4742]: I1205 05:52:29.990493 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:29Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.004783 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:30Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.019469 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:30Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.034260 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:30Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.034298 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.034848 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.035047 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.035301 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.035500 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:30Z","lastTransitionTime":"2025-12-05T05:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.048190 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:30Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.062536 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:30Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.080882 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d5214
2d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:30Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.098431 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:30Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.113661 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:30Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.136379 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:30Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.140447 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.140582 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:30 crc 
kubenswrapper[4742]: I1205 05:52:30.140593 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.140606 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.140615 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:30Z","lastTransitionTime":"2025-12-05T05:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.158798 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:30Z 
is after 2025-08-24T17:21:41Z" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.242395 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.242520 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.242535 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.242551 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.242561 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:30Z","lastTransitionTime":"2025-12-05T05:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.346536 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.346589 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.346605 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.346628 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.346645 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:30Z","lastTransitionTime":"2025-12-05T05:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.449782 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.449829 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.449839 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.449856 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.449867 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:30Z","lastTransitionTime":"2025-12-05T05:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.553167 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.553199 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.553213 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.553268 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.553281 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:30Z","lastTransitionTime":"2025-12-05T05:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.656782 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.656817 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.656827 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.656852 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.656864 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:30Z","lastTransitionTime":"2025-12-05T05:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.759198 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.759238 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.759247 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.759261 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.759271 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:30Z","lastTransitionTime":"2025-12-05T05:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.862439 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.862479 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.862490 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.862507 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.862518 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:30Z","lastTransitionTime":"2025-12-05T05:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.909744 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" event={"ID":"130d4974-9fb6-4cdb-b115-56d2a96b1438","Type":"ContainerStarted","Data":"581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d"} Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.917113 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerStarted","Data":"bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2"} Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.918253 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.918392 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.930125 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:30Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.942451 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.942680 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.946655 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:30Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.965619 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:30Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.967485 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.967522 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:30 crc 
kubenswrapper[4742]: I1205 05:52:30.967534 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.967550 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.967558 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:30Z","lastTransitionTime":"2025-12-05T05:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:30 crc kubenswrapper[4742]: I1205 05:52:30.989165 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:30Z 
is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.007827 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.021570 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.025939 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.026701 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:52:39.026678644 +0000 UTC m=+34.938813716 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.034603 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 
05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.056686 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.069842 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.069885 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.069898 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.069917 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.069868 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.069929 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:31Z","lastTransitionTime":"2025-12-05T05:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.082859 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.101572 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.119909 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.126851 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:31 crc 
kubenswrapper[4742]: I1205 05:52:31.126889 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.126914 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.126935 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.127032 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.127047 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.127078 4742 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.127113 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:39.127101695 +0000 UTC m=+35.039236757 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.127162 4742 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.127210 4742 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.127251 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.127296 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:39.12726067 +0000 UTC m=+35.039395772 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.127295 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.127337 4742 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.127340 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:39.127316911 +0000 UTC m=+35.039452093 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.127417 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-05 05:52:39.127393183 +0000 UTC m=+35.039528295 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.136648 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.148914 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.163551 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d5214
2d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.172351 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.172409 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.172426 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.172451 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.172469 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:31Z","lastTransitionTime":"2025-12-05T05:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.177879 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.196487 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.207016 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.229213 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469a
b57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.246284 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.263600 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.274505 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.274555 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.274567 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.274586 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.274598 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:31Z","lastTransitionTime":"2025-12-05T05:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.280775 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.296473 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.314470 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.331770 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.346836 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.363467 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.377089 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.377163 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.377187 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.377216 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.377238 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:31Z","lastTransitionTime":"2025-12-05T05:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.382682 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.382784 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.382685 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.382875 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.382992 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:31 crc kubenswrapper[4742]: E1205 05:52:31.383196 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.384892 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.408705 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.433008 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:31Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.479839 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.479885 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.479896 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.479910 4742 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.479920 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:31Z","lastTransitionTime":"2025-12-05T05:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.583190 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.583251 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.583273 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.583298 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.583316 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:31Z","lastTransitionTime":"2025-12-05T05:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.686702 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.686757 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.686766 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.686780 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.686794 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:31Z","lastTransitionTime":"2025-12-05T05:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.790025 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.790147 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.790207 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.790239 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.790260 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:31Z","lastTransitionTime":"2025-12-05T05:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.893830 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.893890 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.893908 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.893932 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.893951 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:31Z","lastTransitionTime":"2025-12-05T05:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.920430 4742 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.998108 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.998174 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.998198 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.998228 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:31 crc kubenswrapper[4742]: I1205 05:52:31.998247 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:31Z","lastTransitionTime":"2025-12-05T05:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.101511 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.101576 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.101602 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.101631 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.101658 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:32Z","lastTransitionTime":"2025-12-05T05:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.204986 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.205093 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.205119 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.205560 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.205617 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:32Z","lastTransitionTime":"2025-12-05T05:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.307607 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.307657 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.307674 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.307694 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.307714 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:32Z","lastTransitionTime":"2025-12-05T05:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.388855 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.409323 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.409375 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.409393 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.409415 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.409430 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:32Z","lastTransitionTime":"2025-12-05T05:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.511890 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.511932 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.511943 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.511960 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.511970 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:32Z","lastTransitionTime":"2025-12-05T05:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.614653 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.614722 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.614739 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.614765 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.614781 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:32Z","lastTransitionTime":"2025-12-05T05:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.718233 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.718273 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.718286 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.718302 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.718311 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:32Z","lastTransitionTime":"2025-12-05T05:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.820531 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.820835 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.820848 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.820864 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.820875 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:32Z","lastTransitionTime":"2025-12-05T05:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.923130 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.923163 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.923177 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.923201 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:32 crc kubenswrapper[4742]: I1205 05:52:32.923213 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:32Z","lastTransitionTime":"2025-12-05T05:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.025842 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.025880 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.025898 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.025914 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.025924 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:33Z","lastTransitionTime":"2025-12-05T05:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.127867 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.127900 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.127909 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.127922 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.127930 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:33Z","lastTransitionTime":"2025-12-05T05:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.231707 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.231765 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.231781 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.231804 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.231825 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:33Z","lastTransitionTime":"2025-12-05T05:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.334744 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.334795 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.334810 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.334830 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.334843 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:33Z","lastTransitionTime":"2025-12-05T05:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.382406 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.382483 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:33 crc kubenswrapper[4742]: E1205 05:52:33.382537 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:33 crc kubenswrapper[4742]: E1205 05:52:33.382683 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.382402 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:33 crc kubenswrapper[4742]: E1205 05:52:33.382823 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.437953 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.438351 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.438416 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.438448 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.438470 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:33Z","lastTransitionTime":"2025-12-05T05:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.541771 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.541837 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.541861 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.541889 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.541910 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:33Z","lastTransitionTime":"2025-12-05T05:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.645191 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.645257 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.645273 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.645295 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.645311 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:33Z","lastTransitionTime":"2025-12-05T05:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.748570 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.748706 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.748740 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.748771 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.748793 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:33Z","lastTransitionTime":"2025-12-05T05:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.852355 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.852436 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.852463 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.852489 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.852510 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:33Z","lastTransitionTime":"2025-12-05T05:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.929559 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/0.log" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.933431 4742 generic.go:334] "Generic (PLEG): container finished" podID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerID="bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2" exitCode=1 Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.933525 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerDied","Data":"bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2"} Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.934474 4742 scope.go:117] "RemoveContainer" containerID="bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.955005 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.955099 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.955119 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.955144 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.955165 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:33Z","lastTransitionTime":"2025-12-05T05:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.967787 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:33Z\\\",\\\"message\\\":\\\"-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867275 6021 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867397 6021 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867504 6021 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867620 6021 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867723 6021 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867891 6021 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1205 05:52:32.867952 6021 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9f
dad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:33 crc kubenswrapper[4742]: I1205 05:52:33.987989 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:33Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.010249 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.030532 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.047484 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.057977 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.058015 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.058026 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.058087 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.058102 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:34Z","lastTransitionTime":"2025-12-05T05:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.065005 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.080223 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.103037 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469a
b57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.127012 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.156694 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.160813 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.160881 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.160905 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.160937 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.160977 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:34Z","lastTransitionTime":"2025-12-05T05:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.183907 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.204728 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.218230 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.231531 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.243489 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.263208 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.263235 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.263285 4742 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.263298 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.263308 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:34Z","lastTransitionTime":"2025-12-05T05:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.365394 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.365438 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.365449 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.365465 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.365476 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:34Z","lastTransitionTime":"2025-12-05T05:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.403599 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.421619 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.437146 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.454240 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.467860 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.467902 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.467921 4742 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.467947 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.467964 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:34Z","lastTransitionTime":"2025-12-05T05:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.473984 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\
\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.492894 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.514581 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.540660 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.564737 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:33Z\\\",\\\"message\\\":\\\"-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867275 6021 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867397 6021 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867504 6021 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867620 6021 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867723 6021 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867891 6021 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1205 05:52:32.867952 6021 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9f
dad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.570616 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.570888 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.571106 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.571287 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.571427 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:34Z","lastTransitionTime":"2025-12-05T05:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.586136 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.609121 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.624799 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.659611 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469a
b57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.674204 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.674256 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.674275 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.674302 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.674322 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:34Z","lastTransitionTime":"2025-12-05T05:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.674993 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.694589 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.775954 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.775985 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.775993 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.776005 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.776013 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:34Z","lastTransitionTime":"2025-12-05T05:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.878952 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.878991 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.879002 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.879018 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.879030 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:34Z","lastTransitionTime":"2025-12-05T05:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.941612 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/0.log" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.944109 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerStarted","Data":"c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624"} Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.944639 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.955256 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.966560 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d5214
2d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.976588 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.980435 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.980462 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.980472 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.980489 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.980501 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:34Z","lastTransitionTime":"2025-12-05T05:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.986853 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:34 crc kubenswrapper[4742]: I1205 05:52:34.995555 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.010707 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb
09c092d247bff3383534d624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:33Z\\\",\\\"message\\\":\\\"-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867275 6021 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867397 6021 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867504 6021 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867620 6021 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867723 6021 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867891 6021 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1205 05:52:32.867952 6021 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\
\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:35Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.025160 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:35Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.037030 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:35Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.053709 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:35Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.065104 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:35Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.076943 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:35Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.083129 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.083171 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.083181 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.083195 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.083204 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:35Z","lastTransitionTime":"2025-12-05T05:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.086182 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:35Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.104539 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o
://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1
afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:35Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.115903 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:35Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.127132 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:35Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.185227 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.185264 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.185275 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.185291 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.185302 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:35Z","lastTransitionTime":"2025-12-05T05:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.287578 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.287606 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.287614 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.287626 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.287635 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:35Z","lastTransitionTime":"2025-12-05T05:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.382179 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.382217 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:35 crc kubenswrapper[4742]: E1205 05:52:35.382317 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.382407 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:35 crc kubenswrapper[4742]: E1205 05:52:35.382481 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:35 crc kubenswrapper[4742]: E1205 05:52:35.382626 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.389899 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.389947 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.389960 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.389975 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.389987 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:35Z","lastTransitionTime":"2025-12-05T05:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.494406 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.494479 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.494519 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.494547 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.494565 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:35Z","lastTransitionTime":"2025-12-05T05:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.597732 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.597795 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.597816 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.597840 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.597858 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:35Z","lastTransitionTime":"2025-12-05T05:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.700826 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.700943 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.700970 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.701001 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.701026 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:35Z","lastTransitionTime":"2025-12-05T05:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.803504 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.803582 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.803602 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.803628 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.803645 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:35Z","lastTransitionTime":"2025-12-05T05:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.906395 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.906455 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.906474 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.906500 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:35 crc kubenswrapper[4742]: I1205 05:52:35.906524 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:35Z","lastTransitionTime":"2025-12-05T05:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.008839 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.008885 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.008896 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.008912 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.008921 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:36Z","lastTransitionTime":"2025-12-05T05:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.112629 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.112682 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.112698 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.112721 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.112764 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:36Z","lastTransitionTime":"2025-12-05T05:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.215223 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.215277 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.215294 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.215319 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.215334 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:36Z","lastTransitionTime":"2025-12-05T05:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.277031 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.277135 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.277194 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.277229 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.277252 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:36Z","lastTransitionTime":"2025-12-05T05:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: E1205 05:52:36.300850 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.305760 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.305812 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.305829 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.305850 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.305866 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:36Z","lastTransitionTime":"2025-12-05T05:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: E1205 05:52:36.327438 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.332570 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.332625 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.332641 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.332665 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.332682 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:36Z","lastTransitionTime":"2025-12-05T05:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: E1205 05:52:36.358016 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.362860 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.362928 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.362946 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.362972 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.362989 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:36Z","lastTransitionTime":"2025-12-05T05:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: E1205 05:52:36.383279 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.386780 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.386839 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.386855 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.386893 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.386908 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:36Z","lastTransitionTime":"2025-12-05T05:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: E1205 05:52:36.398770 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: E1205 05:52:36.398929 4742 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.400328 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.400364 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.400374 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.400390 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.400398 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:36Z","lastTransitionTime":"2025-12-05T05:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.503041 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.503148 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.503175 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.503207 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.503227 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:36Z","lastTransitionTime":"2025-12-05T05:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.607250 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.607329 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.607368 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.607385 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.607395 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:36Z","lastTransitionTime":"2025-12-05T05:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.710746 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.710878 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.710905 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.710938 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.710972 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:36Z","lastTransitionTime":"2025-12-05T05:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.760850 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p"] Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.761275 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.763705 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.763905 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.783546 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.797990 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.810935 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.813513 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.813547 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:36 crc 
kubenswrapper[4742]: I1205 05:52:36.813556 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.813571 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.813579 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:36Z","lastTransitionTime":"2025-12-05T05:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.839681 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb
09c092d247bff3383534d624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:33Z\\\",\\\"message\\\":\\\"-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867275 6021 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867397 6021 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867504 6021 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867620 6021 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867723 6021 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867891 6021 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1205 05:52:32.867952 6021 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\
\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.848700 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.861945 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.874897 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.886894 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/056de541-2d3a-4782-a2cc-0c96c465ca6f-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hv29p\" (UID: \"056de541-2d3a-4782-a2cc-0c96c465ca6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.887078 4742 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/056de541-2d3a-4782-a2cc-0c96c465ca6f-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hv29p\" (UID: \"056de541-2d3a-4782-a2cc-0c96c465ca6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.887241 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/056de541-2d3a-4782-a2cc-0c96c465ca6f-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hv29p\" (UID: \"056de541-2d3a-4782-a2cc-0c96c465ca6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.887329 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nh29\" (UniqueName: \"kubernetes.io/projected/056de541-2d3a-4782-a2cc-0c96c465ca6f-kube-api-access-2nh29\") pod \"ovnkube-control-plane-749d76644c-hv29p\" (UID: \"056de541-2d3a-4782-a2cc-0c96c465ca6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.888084 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.908833 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469a
b57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.916151 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.916181 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.916189 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.916203 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.916212 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:36Z","lastTransitionTime":"2025-12-05T05:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.927604 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.943899 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.951712 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/1.log" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.952359 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/0.log" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.954834 4742 generic.go:334] "Generic (PLEG): container finished" podID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerID="c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624" exitCode=1 Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.954875 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerDied","Data":"c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624"} Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.954927 4742 scope.go:117] "RemoveContainer" containerID="bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.956133 4742 scope.go:117] "RemoveContainer" containerID="c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624" Dec 05 05:52:36 crc kubenswrapper[4742]: E1205 05:52:36.956421 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\"" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.961498 4742 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.978737 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.987978 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/056de541-2d3a-4782-a2cc-0c96c465ca6f-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hv29p\" (UID: \"056de541-2d3a-4782-a2cc-0c96c465ca6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.988022 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nh29\" (UniqueName: \"kubernetes.io/projected/056de541-2d3a-4782-a2cc-0c96c465ca6f-kube-api-access-2nh29\") pod \"ovnkube-control-plane-749d76644c-hv29p\" (UID: \"056de541-2d3a-4782-a2cc-0c96c465ca6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.988089 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/056de541-2d3a-4782-a2cc-0c96c465ca6f-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hv29p\" (UID: \"056de541-2d3a-4782-a2cc-0c96c465ca6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.988111 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/056de541-2d3a-4782-a2cc-0c96c465ca6f-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hv29p\" (UID: \"056de541-2d3a-4782-a2cc-0c96c465ca6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.988977 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/056de541-2d3a-4782-a2cc-0c96c465ca6f-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-hv29p\" (UID: \"056de541-2d3a-4782-a2cc-0c96c465ca6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.988981 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/056de541-2d3a-4782-a2cc-0c96c465ca6f-env-overrides\") pod \"ovnkube-control-plane-749d76644c-hv29p\" (UID: 
\"056de541-2d3a-4782-a2cc-0c96c465ca6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.995097 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/056de541-2d3a-4782-a2cc-0c96c465ca6f-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-hv29p\" (UID: \"056de541-2d3a-4782-a2cc-0c96c465ca6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" Dec 05 05:52:36 crc kubenswrapper[4742]: I1205 05:52:36.995403 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:36Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.004801 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nh29\" (UniqueName: \"kubernetes.io/projected/056de541-2d3a-4782-a2cc-0c96c465ca6f-kube-api-access-2nh29\") pod \"ovnkube-control-plane-749d76644c-hv29p\" (UID: \"056de541-2d3a-4782-a2cc-0c96c465ca6f\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.014114 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.018238 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.018281 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.018294 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 
05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.018311 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.018328 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:37Z","lastTransitionTime":"2025-12-05T05:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.029450 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.045556 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.060050 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.077415 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.082255 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.097879 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:33Z\\\",\\\"message\\\":\\\"-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867275 6021 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867397 6021 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867504 6021 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867620 6021 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867723 6021 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867891 6021 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1205 05:52:32.867952 6021 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"lumn:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.307762 6146 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1205 05:52:35.308091 6146 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1205 05:52:35.307897 6146 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.308134 6146 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:37 crc kubenswrapper[4742]: W1205 05:52:37.103542 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod056de541_2d3a_4782_a2cc_0c96c465ca6f.slice/crio-e220233292e1ba898483eb79c7ff2f7f95b713d3405e4ac3203253676872ea3c WatchSource:0}: Error finding container e220233292e1ba898483eb79c7ff2f7f95b713d3405e4ac3203253676872ea3c: Status 404 returned error can't find the container with id e220233292e1ba898483eb79c7ff2f7f95b713d3405e4ac3203253676872ea3c
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.110090 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.120952 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.120984 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.120995 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.121009 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.121020 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:37Z","lastTransitionTime":"2025-12-05T05:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.126169 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.138026 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.150580 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.172951 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.185982 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.199788 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.212820 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.223169 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.224549 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.224584 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.224593 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.224610 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.224619 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:37Z","lastTransitionTime":"2025-12-05T05:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.235125 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.246173 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.258834 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.327386 4742 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.327432 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.327448 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.327467 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.327481 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:37Z","lastTransitionTime":"2025-12-05T05:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.382104 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.382133 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.382150 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:37 crc kubenswrapper[4742]: E1205 05:52:37.382228 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:37 crc kubenswrapper[4742]: E1205 05:52:37.382370 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:37 crc kubenswrapper[4742]: E1205 05:52:37.382428 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.429283 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.429312 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.429322 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.429334 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.429345 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:37Z","lastTransitionTime":"2025-12-05T05:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.532185 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.532228 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.532236 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.532249 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.532257 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:37Z","lastTransitionTime":"2025-12-05T05:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.635342 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.635389 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.635404 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.635421 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.635433 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:37Z","lastTransitionTime":"2025-12-05T05:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.738911 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.738967 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.738986 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.739009 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.739031 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:37Z","lastTransitionTime":"2025-12-05T05:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.842896 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.842952 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.842967 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.842987 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.843003 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:37Z","lastTransitionTime":"2025-12-05T05:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.856571 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-pbtb4"] Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.857561 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:37 crc kubenswrapper[4742]: E1205 05:52:37.857697 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.880536 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\
"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.896542 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"
podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.915255 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.931569 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.946707 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.946854 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.946895 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.946915 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.946943 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 
05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.946965 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:37Z","lastTransitionTime":"2025-12-05T05:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.960471 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" event={"ID":"056de541-2d3a-4782-a2cc-0c96c465ca6f","Type":"ContainerStarted","Data":"cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33"} Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.960519 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" event={"ID":"056de541-2d3a-4782-a2cc-0c96c465ca6f","Type":"ContainerStarted","Data":"ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc"} Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.960532 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" event={"ID":"056de541-2d3a-4782-a2cc-0c96c465ca6f","Type":"ContainerStarted","Data":"e220233292e1ba898483eb79c7ff2f7f95b713d3405e4ac3203253676872ea3c"} Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.963449 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/1.log" Dec 05 05:52:37 crc kubenswrapper[4742]: I1205 05:52:37.979660 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469a
b57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.000715 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv5v2\" (UniqueName: \"kubernetes.io/projected/b69352e1-2d48-4211-83e1-25d09fff9d3c-kube-api-access-vv5v2\") pod \"network-metrics-daemon-pbtb4\" (UID: \"b69352e1-2d48-4211-83e1-25d09fff9d3c\") " pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.000895 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs\") pod \"network-metrics-daemon-pbtb4\" (UID: \"b69352e1-2d48-4211-83e1-25d09fff9d3c\") " pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.001698 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.021273 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.037305 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.050023 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.050128 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.050158 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.050247 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.050290 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:38Z","lastTransitionTime":"2025-12-05T05:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.053894 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.067781 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.081746 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.096143 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d5214
2d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.101842 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs\") pod \"network-metrics-daemon-pbtb4\" (UID: \"b69352e1-2d48-4211-83e1-25d09fff9d3c\") " pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:52:38 crc kubenswrapper[4742]: E1205 05:52:38.102774 4742 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 05:52:38 crc kubenswrapper[4742]: E1205 05:52:38.102847 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs podName:b69352e1-2d48-4211-83e1-25d09fff9d3c nodeName:}" failed. No retries permitted until 2025-12-05 05:52:38.602824054 +0000 UTC m=+34.514959156 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs") pod "network-metrics-daemon-pbtb4" (UID: "b69352e1-2d48-4211-83e1-25d09fff9d3c") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.102879 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv5v2\" (UniqueName: \"kubernetes.io/projected/b69352e1-2d48-4211-83e1-25d09fff9d3c-kube-api-access-vv5v2\") pod \"network-metrics-daemon-pbtb4\" (UID: \"b69352e1-2d48-4211-83e1-25d09fff9d3c\") " pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.111032 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.127755 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv5v2\" (UniqueName: \"kubernetes.io/projected/b69352e1-2d48-4211-83e1-25d09fff9d3c-kube-api-access-vv5v2\") pod \"network-metrics-daemon-pbtb4\" (UID: \"b69352e1-2d48-4211-83e1-25d09fff9d3c\") " pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.133285 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.153524 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.153578 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.153856 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.154281 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.154304 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:38Z","lastTransitionTime":"2025-12-05T05:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.156860 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/hos
t/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6
e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.191112 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb
09c092d247bff3383534d624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:33Z\\\",\\\"message\\\":\\\"-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867275 6021 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867397 6021 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867504 6021 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867620 6021 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867723 6021 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867891 6021 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1205 05:52:32.867952 6021 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"lumn:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.307762 6146 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1205 05:52:35.308091 6146 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1205 05:52:35.307897 6146 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.308134 6146 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.212607 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.235988 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.258883 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.258937 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:38 crc 
kubenswrapper[4742]: I1205 05:52:38.258956 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.258980 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.258999 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:38Z","lastTransitionTime":"2025-12-05T05:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.266387 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb
09c092d247bff3383534d624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:33Z\\\",\\\"message\\\":\\\"-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867275 6021 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867397 6021 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867504 6021 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867620 6021 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867723 6021 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867891 6021 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1205 05:52:32.867952 6021 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"lumn:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.307762 6146 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1205 05:52:35.308091 6146 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1205 05:52:35.307897 6146 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.308134 6146 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.287245 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.303580 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.318832 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.330662 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.340733 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.354572 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.361716 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.361756 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.361790 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.361810 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.361822 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:38Z","lastTransitionTime":"2025-12-05T05:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.371124 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.385433 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.416764 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469a
b57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.432693 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.449450 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 
05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.465032 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.465134 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.465152 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.465175 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.465193 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:38Z","lastTransitionTime":"2025-12-05T05:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.469378 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225
c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.489245 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d347
20243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.507570 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.567939 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.568086 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.568114 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.568147 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.568169 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:38Z","lastTransitionTime":"2025-12-05T05:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.608201 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs\") pod \"network-metrics-daemon-pbtb4\" (UID: \"b69352e1-2d48-4211-83e1-25d09fff9d3c\") " pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:38 crc kubenswrapper[4742]: E1205 05:52:38.608379 4742 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:52:38 crc kubenswrapper[4742]: E1205 05:52:38.608475 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs podName:b69352e1-2d48-4211-83e1-25d09fff9d3c nodeName:}" failed. No retries permitted until 2025-12-05 05:52:39.608450436 +0000 UTC m=+35.520585538 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs") pod "network-metrics-daemon-pbtb4" (UID: "b69352e1-2d48-4211-83e1-25d09fff9d3c") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.650017 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.671310 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.671358 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.671376 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.671398 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.671416 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:38Z","lastTransitionTime":"2025-12-05T05:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.684666 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.708974 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.731545 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.749164 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.769105 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.774642 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.774745 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.774765 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.775301 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.775360 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:38Z","lastTransitionTime":"2025-12-05T05:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.789688 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.808577 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.826783 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.847496 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.870227 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.878541 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.878596 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.878615 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.878639 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.878656 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:38Z","lastTransitionTime":"2025-12-05T05:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.896786 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.932949 4742 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:33Z\\\",\\\"message\\\":\\\"-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867275 6021 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867397 6021 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867504 6021 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867620 6021 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867723 6021 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867891 6021 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1205 05:52:32.867952 6021 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"lumn:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] 
Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.307762 6146 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1205 05:52:35.308091 6146 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1205 05:52:35.307897 6146 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.308134 6146 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa
548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.950892 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.976685 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.983332 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.983408 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.983427 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.983455 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.983473 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:38Z","lastTransitionTime":"2025-12-05T05:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:38 crc kubenswrapper[4742]: I1205 05:52:38.990917 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.004463 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:39Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.013736 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:39Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.086320 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.086398 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.086419 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.086450 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.086472 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:39Z","lastTransitionTime":"2025-12-05T05:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.114985 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.115178 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:52:55.115144123 +0000 UTC m=+51.027279225 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.189508 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.189582 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.189603 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.189627 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.189647 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:39Z","lastTransitionTime":"2025-12-05T05:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.216730 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.216837 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.216913 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.216936 4742 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.216993 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.217044 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:55.217015652 +0000 UTC m=+51.129150744 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.217123 4742 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.217221 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:55.217194497 +0000 UTC m=+51.129329599 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.217262 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.217296 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.217293 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.217324 4742 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.217334 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.217356 4742 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.217416 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:55.217396842 +0000 UTC m=+51.129531944 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.217456 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:52:55.217443233 +0000 UTC m=+51.129578435 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.293333 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.293404 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.293424 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.293450 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.293473 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:39Z","lastTransitionTime":"2025-12-05T05:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.382685 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.382725 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.382750 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.382804 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.383445 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.383698 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.383808 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.383924 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.397127 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.397185 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.397204 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.397229 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.397464 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:39Z","lastTransitionTime":"2025-12-05T05:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.500183 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.500240 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.500257 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.500281 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.500298 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:39Z","lastTransitionTime":"2025-12-05T05:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.602862 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.602907 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.602923 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.602948 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.602965 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:39Z","lastTransitionTime":"2025-12-05T05:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.622442 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs\") pod \"network-metrics-daemon-pbtb4\" (UID: \"b69352e1-2d48-4211-83e1-25d09fff9d3c\") " pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.622661 4742 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:52:39 crc kubenswrapper[4742]: E1205 05:52:39.622789 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs podName:b69352e1-2d48-4211-83e1-25d09fff9d3c nodeName:}" failed. No retries permitted until 2025-12-05 05:52:41.622759142 +0000 UTC m=+37.534894244 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs") pod "network-metrics-daemon-pbtb4" (UID: "b69352e1-2d48-4211-83e1-25d09fff9d3c") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.705690 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.705746 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.705763 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.705786 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.705803 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:39Z","lastTransitionTime":"2025-12-05T05:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.809040 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.809129 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.809152 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.809181 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.809200 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:39Z","lastTransitionTime":"2025-12-05T05:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.912546 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.912616 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.912633 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.912659 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:39 crc kubenswrapper[4742]: I1205 05:52:39.912678 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:39Z","lastTransitionTime":"2025-12-05T05:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.015625 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.015703 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.015721 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.015743 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.015759 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:40Z","lastTransitionTime":"2025-12-05T05:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.117971 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.118009 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.118023 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.118044 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.118086 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:40Z","lastTransitionTime":"2025-12-05T05:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.221763 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.221832 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.221855 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.221884 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.221906 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:40Z","lastTransitionTime":"2025-12-05T05:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.325202 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.325279 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.325302 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.325337 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.325360 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:40Z","lastTransitionTime":"2025-12-05T05:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.428394 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.428480 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.428499 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.428522 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.428539 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:40Z","lastTransitionTime":"2025-12-05T05:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.531486 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.531542 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.531559 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.531582 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.531601 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:40Z","lastTransitionTime":"2025-12-05T05:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.634315 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.634669 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.634856 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.635048 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.635276 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:40Z","lastTransitionTime":"2025-12-05T05:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.737991 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.738413 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.738562 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.738713 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.738841 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:40Z","lastTransitionTime":"2025-12-05T05:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.842280 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.842360 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.842384 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.842413 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.842434 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:40Z","lastTransitionTime":"2025-12-05T05:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.944992 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.945026 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.945034 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.945047 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:40 crc kubenswrapper[4742]: I1205 05:52:40.945072 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:40Z","lastTransitionTime":"2025-12-05T05:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.047824 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.047882 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.047902 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.047927 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.047946 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:41Z","lastTransitionTime":"2025-12-05T05:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.151418 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.151492 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.151516 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.151548 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.151570 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:41Z","lastTransitionTime":"2025-12-05T05:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.255011 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.255096 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.255115 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.255161 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.255180 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:41Z","lastTransitionTime":"2025-12-05T05:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.357756 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.357821 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.357840 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.357864 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.357882 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:41Z","lastTransitionTime":"2025-12-05T05:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.382333 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.382375 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.382408 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.382437 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:41 crc kubenswrapper[4742]: E1205 05:52:41.382616 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:41 crc kubenswrapper[4742]: E1205 05:52:41.382742 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:41 crc kubenswrapper[4742]: E1205 05:52:41.382895 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:52:41 crc kubenswrapper[4742]: E1205 05:52:41.383012 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.460536 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.460594 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.460609 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.460639 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.460656 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:41Z","lastTransitionTime":"2025-12-05T05:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.565194 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.565238 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.565248 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.565267 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.565280 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:41Z","lastTransitionTime":"2025-12-05T05:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.646877 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs\") pod \"network-metrics-daemon-pbtb4\" (UID: \"b69352e1-2d48-4211-83e1-25d09fff9d3c\") " pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:41 crc kubenswrapper[4742]: E1205 05:52:41.647123 4742 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:52:41 crc kubenswrapper[4742]: E1205 05:52:41.647216 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs podName:b69352e1-2d48-4211-83e1-25d09fff9d3c nodeName:}" failed. No retries permitted until 2025-12-05 05:52:45.647188097 +0000 UTC m=+41.559323169 (durationBeforeRetry 4s). 
Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.669192 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.669275 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.669300 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.669330 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.669357 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:41Z","lastTransitionTime":"2025-12-05T05:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.773007 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.773095 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.773112 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.773136 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.773153 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:41Z","lastTransitionTime":"2025-12-05T05:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.875943 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.875992 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.876004 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.876021 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.876038 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:41Z","lastTransitionTime":"2025-12-05T05:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.979815 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.979888 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.979914 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.979945 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:41 crc kubenswrapper[4742]: I1205 05:52:41.979970 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:41Z","lastTransitionTime":"2025-12-05T05:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.082512 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.082589 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.082615 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.082644 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.082666 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:42Z","lastTransitionTime":"2025-12-05T05:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.185976 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.186101 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.186129 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.186166 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.186192 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:42Z","lastTransitionTime":"2025-12-05T05:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.289592 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.289665 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.289684 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.289708 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.289725 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:42Z","lastTransitionTime":"2025-12-05T05:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.393100 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.393172 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.393198 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.393227 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.393250 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:42Z","lastTransitionTime":"2025-12-05T05:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.496402 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.496452 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.496472 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.496500 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.496523 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:42Z","lastTransitionTime":"2025-12-05T05:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.599114 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.599164 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.599175 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.599192 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.599204 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:42Z","lastTransitionTime":"2025-12-05T05:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.702510 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.702571 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.702594 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.702627 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.702649 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:42Z","lastTransitionTime":"2025-12-05T05:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.805346 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.805400 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.805416 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.805445 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.805463 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:42Z","lastTransitionTime":"2025-12-05T05:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.908859 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.908915 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.908933 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.908956 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:42 crc kubenswrapper[4742]: I1205 05:52:42.908973 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:42Z","lastTransitionTime":"2025-12-05T05:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.012465 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.012524 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.012541 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.012565 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.012584 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:43Z","lastTransitionTime":"2025-12-05T05:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.115843 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.115900 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.115924 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.115949 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.115964 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:43Z","lastTransitionTime":"2025-12-05T05:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.220443 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.220513 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.220531 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.220556 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.220576 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:43Z","lastTransitionTime":"2025-12-05T05:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.323469 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.323532 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.323550 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.323574 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.323596 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:43Z","lastTransitionTime":"2025-12-05T05:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.381725 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.381787 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.381746 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:43 crc kubenswrapper[4742]: E1205 05:52:43.381899 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.381958 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:43 crc kubenswrapper[4742]: E1205 05:52:43.382019 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:43 crc kubenswrapper[4742]: E1205 05:52:43.382184 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:52:43 crc kubenswrapper[4742]: E1205 05:52:43.382361 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.426767 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.426838 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.426854 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.426879 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.426897 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:43Z","lastTransitionTime":"2025-12-05T05:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.530690 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.530749 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.530769 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.530793 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.530811 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:43Z","lastTransitionTime":"2025-12-05T05:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.633863 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.633940 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.633962 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.633985 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.634002 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:43Z","lastTransitionTime":"2025-12-05T05:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.737178 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.737293 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.737314 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.737340 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.737360 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:43Z","lastTransitionTime":"2025-12-05T05:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.840174 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.840258 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.840315 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.840382 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.840409 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:43Z","lastTransitionTime":"2025-12-05T05:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.943874 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.943927 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.943949 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.943972 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:43 crc kubenswrapper[4742]: I1205 05:52:43.943989 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:43Z","lastTransitionTime":"2025-12-05T05:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.046656 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.046716 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.046734 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.046757 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.046774 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:44Z","lastTransitionTime":"2025-12-05T05:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.150604 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.151150 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.151248 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.151291 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.151357 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:44Z","lastTransitionTime":"2025-12-05T05:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.254451 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.254505 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.254520 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.254542 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.254560 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:44Z","lastTransitionTime":"2025-12-05T05:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.356999 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.357072 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.357086 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.357106 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.357120 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:44Z","lastTransitionTime":"2025-12-05T05:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.401396 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.432449 4742 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd9038a33513a68d90693d51341e45c02b3b29fd4678d3cc3c46bb0781141f2\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:33Z\\\",\\\"message\\\":\\\"-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867275 6021 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867397 6021 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867504 6021 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867620 6021 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 05:52:32.867723 6021 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 05:52:32.867891 6021 reflector.go:311] Stopping reflector *v1.NetworkAttachmentDefinition (0s) from github.com/k8snetworkplumbingwg/network-attachment-definition-client/pkg/client/informers/externalversions/factory.go:117\\\\nI1205 05:52:32.867952 6021 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"lumn:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] 
Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.307762 6146 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1205 05:52:35.308091 6146 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1205 05:52:35.307897 6146 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.308134 6146 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa
548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.446980 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.460230 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.460326 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.460369 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.460412 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.460434 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:44Z","lastTransitionTime":"2025-12-05T05:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.462682 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.488323 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 
05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.505192 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.521358 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.544528 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.563328 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.563399 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.563423 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.563453 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.563362 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.563487 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:44Z","lastTransitionTime":"2025-12-05T05:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.589692 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.623738 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"fi
nishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.644160 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.662964 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.666507 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.666562 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.666576 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.666602 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.666617 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:44Z","lastTransitionTime":"2025-12-05T05:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.681226 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.695666 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.707788 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.720349 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.769959 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.770026 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.770051 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.770114 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.770138 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:44Z","lastTransitionTime":"2025-12-05T05:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.873514 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.873569 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.873585 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.873610 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.873627 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:44Z","lastTransitionTime":"2025-12-05T05:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.976528 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.976582 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.976601 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.976643 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:44 crc kubenswrapper[4742]: I1205 05:52:44.976661 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:44Z","lastTransitionTime":"2025-12-05T05:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.079487 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.079636 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.079668 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.079698 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.079722 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:45Z","lastTransitionTime":"2025-12-05T05:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.183422 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.183510 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.183533 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.183559 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.183580 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:45Z","lastTransitionTime":"2025-12-05T05:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
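The five-entry cycle above (three NodeHasSufficient* events, NodeNotReady, then a Ready=False condition from setters.go) repeats roughly every 100ms. The node's Ready condition simply mirrors the container runtime's NetworkReady status, and that stays false until a CNI configuration file appears in /etc/kubernetes/cni/net.d/, which the network provider (OVN-Kubernetes on this cluster) writes once its own pods come up. A minimal Go sketch of that readiness test follows; it is illustrative only, not kubelet or CRI-O source, and the directory path is the one named in the message:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    // cniConfDir is the directory named in the kubelet message above.
    const cniConfDir = "/etc/kubernetes/cni/net.d"

    func main() {
        // The runtime reports NetworkReady=true once at least one CNI
        // configuration file (*.conf, *.conflist; .json is also accepted
        // by some runtimes) exists in the configured directory.
        matches, err := filepath.Glob(filepath.Join(cniConfDir, "*.conf*"))
        if err != nil {
            fmt.Fprintln(os.Stderr, "glob:", err)
            os.Exit(1)
        }
        if len(matches) == 0 {
            fmt.Println("NetworkReady=false: no CNI configuration file in", cniConfDir)
            return
        }
        fmt.Println("NetworkReady=true:", matches)
    }

On the first status sync after such a file appears, the same setters.go path flips Ready back to True, so this block of the log is noise that resolves itself once the network operator starts.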
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.286328 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.286391 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.286408 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.286430 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.286450 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:45Z","lastTransitionTime":"2025-12-05T05:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.382809 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.382833 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.382853 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.382973 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:52:45 crc kubenswrapper[4742]: E1205 05:52:45.383199 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 05:52:45 crc kubenswrapper[4742]: E1205 05:52:45.383767 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 05:52:45 crc kubenswrapper[4742]: E1205 05:52:45.383934 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:45 crc kubenswrapper[4742]: E1205 05:52:45.384167 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.388718 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.388765 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.388784 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.388806 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.388825 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:45Z","lastTransitionTime":"2025-12-05T05:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.491966 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.492015 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.492029 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.492048 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.492077 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:45Z","lastTransitionTime":"2025-12-05T05:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.595568 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.595628 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.595645 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.595668 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.595687 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:45Z","lastTransitionTime":"2025-12-05T05:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.694916 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs\") pod \"network-metrics-daemon-pbtb4\" (UID: \"b69352e1-2d48-4211-83e1-25d09fff9d3c\") " pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:52:45 crc kubenswrapper[4742]: E1205 05:52:45.695158 4742 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 05:52:45 crc kubenswrapper[4742]: E1205 05:52:45.695243 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs podName:b69352e1-2d48-4211-83e1-25d09fff9d3c nodeName:}" failed. No retries permitted until 2025-12-05 05:52:53.695218854 +0000 UTC m=+49.607353956 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs") pod "network-metrics-daemon-pbtb4" (UID: "b69352e1-2d48-4211-83e1-25d09fff9d3c") : object "openshift-multus"/"metrics-daemon-secret" not registered
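The mount failure above is parked rather than retried immediately: nestedpendingoperations records the failed MountVolume and refuses retries for 8s (until 05:52:53.695, i.e. m=+57.6 against a kubelet start at m=0). An 8s step is consistent with a backoff that doubles on each consecutive failure of the same operation. The sketch below just reproduces that doubling arithmetic; the 500ms starting point and roughly-two-minute cap are assumptions for illustration, not values read from this log, which only shows the 8s step:

    package main

    import (
        "fmt"
        "time"
    )

    // durationBeforeRetry doubles the delay per consecutive failure.
    // Initial value and cap are assumed, not taken from this log.
    func durationBeforeRetry(failures int) time.Duration {
        d := 500 * time.Millisecond
        for i := 1; i < failures; i++ {
            d *= 2
            if cap := 2*time.Minute + 2*time.Second; d > cap {
                return cap
            }
        }
        return d
    }

    func main() {
        for n := 1; n <= 6; n++ {
            fmt.Printf("failure %d -> retry after %v\n", n, durationBeforeRetry(n))
        }
        // failure 5 -> retry after 8s, matching the entry above.
    }

The root cause here is separate from the CNI noise: the kubelet's secret manager has no registered watch for openshift-multus/metrics-daemon-secret yet ("object ... not registered"), so each retry will fail the same way until that secret can be resynced from the apiserver.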
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.699263 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.699331 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.699352 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.699378 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.699403 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:45Z","lastTransitionTime":"2025-12-05T05:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.803379 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.803457 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.803484 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.803516 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.803544 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:45Z","lastTransitionTime":"2025-12-05T05:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.906577 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.906650 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.906671 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.906696 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:45 crc kubenswrapper[4742]: I1205 05:52:45.906714 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:45Z","lastTransitionTime":"2025-12-05T05:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.015434 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.015514 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.015538 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.015570 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.015595 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:46Z","lastTransitionTime":"2025-12-05T05:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.118320 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.118378 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.118396 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.118421 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.118439 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:46Z","lastTransitionTime":"2025-12-05T05:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
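Interleaved with this cycle, every status patch in the section, the pod statuses earlier and the node status patches that follow, is rejected by the same admission webhook: pod.network-node-identity.openshift.io / node.network-node-identity.openshift.io at https://127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z, more than three months before the node's current clock of 2025-12-05. That gap is characteristic of a CRC image resumed long after it was built, before the cluster has had a chance to rotate its internal certificates. A small Go probe, hypothetical and not part of this log, surfaces the same validity window the kubelet's TLS handshake trips over:

    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
        "time"
    )

    func main() {
        // Connect to the webhook endpoint named in the log entries.
        // Verification is disabled only so the handshake completes far
        // enough to read the expired certificate's dates.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            log.Fatal(err)
        }
        defer conn.Close()

        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Printf("subject:   %s\n", cert.Subject)
        fmt.Printf("notBefore: %s\n", cert.NotBefore.Format(time.RFC3339))
        fmt.Printf("notAfter:  %s\n", cert.NotAfter.Format(time.RFC3339))
        if time.Now().After(cert.NotAfter) {
            // Matches the kubelet error: "certificate has expired or is
            // not yet valid: current time ... is after 2025-08-24T17:21:41Z".
            fmt.Println("certificate has expired")
        }
    }

The kubelet, correctly, does not skip verification, so each Post to /pod and /node fails and none of the statuses in this section ever land on the apiserver until the certificate is rotated.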
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.221257 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.221337 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.221589 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.221648 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.221673 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:46Z","lastTransitionTime":"2025-12-05T05:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.325143 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.325227 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.325277 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.325302 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.325319 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:46Z","lastTransitionTime":"2025-12-05T05:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.427994 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.428096 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.428116 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.428137 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.428154 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:46Z","lastTransitionTime":"2025-12-05T05:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.493966 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.494032 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.494053 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.494124 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.494146 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:46Z","lastTransitionTime":"2025-12-05T05:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:46 crc kubenswrapper[4742]: E1205 05:52:46.510539 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:46Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.515581 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.515623 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.515642 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.515663 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.515681 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:46Z","lastTransitionTime":"2025-12-05T05:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:46 crc kubenswrapper[4742]: E1205 05:52:46.535960 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:46Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.540918 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.540980 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.540998 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.541018 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.541037 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:46Z","lastTransitionTime":"2025-12-05T05:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:46 crc kubenswrapper[4742]: E1205 05:52:46.558696 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:46Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.564224 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.564286 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.564308 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.564338 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.564358 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:46Z","lastTransitionTime":"2025-12-05T05:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:46 crc kubenswrapper[4742]: E1205 05:52:46.582039 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:46Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.586942 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.586972 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.586983 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.587001 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.587014 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:46Z","lastTransitionTime":"2025-12-05T05:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:46 crc kubenswrapper[4742]: E1205 05:52:46.603850 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:46Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:46 crc kubenswrapper[4742]: E1205 05:52:46.603987 4742 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.605502 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.605571 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.605586 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.605607 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.605622 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:46Z","lastTransitionTime":"2025-12-05T05:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.708495 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.708554 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.708572 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.708596 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.708614 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:46Z","lastTransitionTime":"2025-12-05T05:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.812526 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.812579 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.812596 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.812619 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:46 crc kubenswrapper[4742]: I1205 05:52:46.812635 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:46Z","lastTransitionTime":"2025-12-05T05:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.381767 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.381824 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.381946 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:47 crc kubenswrapper[4742]: E1205 05:52:47.382172 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.382235 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:47 crc kubenswrapper[4742]: E1205 05:52:47.382429 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:47 crc kubenswrapper[4742]: E1205 05:52:47.382523 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:47 crc kubenswrapper[4742]: E1205 05:52:47.382810 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.428288 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.428346 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.428366 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.428391 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.428410 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:47Z","lastTransitionTime":"2025-12-05T05:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.532520 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.532600 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.532634 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.532666 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.532687 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:47Z","lastTransitionTime":"2025-12-05T05:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.636285 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.636363 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.636382 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.636409 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.636438 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:47Z","lastTransitionTime":"2025-12-05T05:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.739701 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.739768 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.739791 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.739819 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.739841 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:47Z","lastTransitionTime":"2025-12-05T05:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.842639 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.842699 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.842714 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.842736 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.842751 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:47Z","lastTransitionTime":"2025-12-05T05:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.946269 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.946353 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.946373 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.946399 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:47 crc kubenswrapper[4742]: I1205 05:52:47.946418 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:47Z","lastTransitionTime":"2025-12-05T05:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.049189 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.049252 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.049291 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.049321 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.049343 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:48Z","lastTransitionTime":"2025-12-05T05:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.152396 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.152464 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.152486 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.152513 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.152536 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:48Z","lastTransitionTime":"2025-12-05T05:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.255162 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.255248 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.255283 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.255315 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.255338 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:48Z","lastTransitionTime":"2025-12-05T05:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.358476 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.358534 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.358556 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.358634 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.358660 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:48Z","lastTransitionTime":"2025-12-05T05:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.461536 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.461604 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.461626 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.461655 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.461677 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:48Z","lastTransitionTime":"2025-12-05T05:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.564740 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.564812 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.564846 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.564876 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.564898 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:48Z","lastTransitionTime":"2025-12-05T05:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.668102 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.668212 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.668227 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.668246 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.668260 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:48Z","lastTransitionTime":"2025-12-05T05:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.771359 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.771403 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.771415 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.771436 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.771452 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:48Z","lastTransitionTime":"2025-12-05T05:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.873801 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.873839 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.873850 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.873865 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.873877 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:48Z","lastTransitionTime":"2025-12-05T05:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.977512 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.977583 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.977612 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.977644 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:48 crc kubenswrapper[4742]: I1205 05:52:48.977668 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:48Z","lastTransitionTime":"2025-12-05T05:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.080634 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.080713 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.080739 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.080770 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.080794 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:49Z","lastTransitionTime":"2025-12-05T05:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.183792 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.183898 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.183915 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.183939 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.183956 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:49Z","lastTransitionTime":"2025-12-05T05:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.286847 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.286915 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.286931 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.286954 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.286970 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:49Z","lastTransitionTime":"2025-12-05T05:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.382676 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.382780 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.382718 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.382905 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:49 crc kubenswrapper[4742]: E1205 05:52:49.383151 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:49 crc kubenswrapper[4742]: E1205 05:52:49.383299 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:49 crc kubenswrapper[4742]: E1205 05:52:49.383374 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:49 crc kubenswrapper[4742]: E1205 05:52:49.383423 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.389513 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.389609 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.389629 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.389650 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.389711 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:49Z","lastTransitionTime":"2025-12-05T05:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.492897 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.492938 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.492950 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.492968 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.492980 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:49Z","lastTransitionTime":"2025-12-05T05:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.595166 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.595224 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.595241 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.595262 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.595278 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:49Z","lastTransitionTime":"2025-12-05T05:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.697723 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.697769 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.697780 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.697796 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.697807 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:49Z","lastTransitionTime":"2025-12-05T05:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.800538 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.800581 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.800592 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.800604 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.800612 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:49Z","lastTransitionTime":"2025-12-05T05:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.903634 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.903682 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.903699 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.903721 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:49 crc kubenswrapper[4742]: I1205 05:52:49.903740 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:49Z","lastTransitionTime":"2025-12-05T05:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.006606 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.006659 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.006673 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.006695 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.006708 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:50Z","lastTransitionTime":"2025-12-05T05:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.110326 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.110392 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.110408 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.110432 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.110448 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:50Z","lastTransitionTime":"2025-12-05T05:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.213725 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.213790 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.213837 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.213867 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.213888 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:50Z","lastTransitionTime":"2025-12-05T05:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.317904 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.318006 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.318030 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.318058 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.318114 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:50Z","lastTransitionTime":"2025-12-05T05:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.420150 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.420181 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.420192 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.420217 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.420231 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:50Z","lastTransitionTime":"2025-12-05T05:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.522685 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.522737 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.522754 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.522793 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.522812 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:50Z","lastTransitionTime":"2025-12-05T05:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.625975 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.626162 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.626189 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.626218 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.626241 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:50Z","lastTransitionTime":"2025-12-05T05:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.729417 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.729491 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.729515 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.729546 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.729568 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:50Z","lastTransitionTime":"2025-12-05T05:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.832029 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.832166 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.832191 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.832225 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.832249 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:50Z","lastTransitionTime":"2025-12-05T05:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.935012 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.935205 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.935256 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.935287 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:50 crc kubenswrapper[4742]: I1205 05:52:50.935306 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:50Z","lastTransitionTime":"2025-12-05T05:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.038499 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.038554 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.038564 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.038581 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.038599 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:51Z","lastTransitionTime":"2025-12-05T05:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.141795 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.141858 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.141869 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.141888 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.141900 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:51Z","lastTransitionTime":"2025-12-05T05:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.244339 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.244433 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.244462 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.244499 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.244528 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:51Z","lastTransitionTime":"2025-12-05T05:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.348232 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.348293 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.348312 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.348339 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.348359 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:51Z","lastTransitionTime":"2025-12-05T05:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.382871 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.382893 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.382933 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.382967 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:51 crc kubenswrapper[4742]: E1205 05:52:51.383206 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:51 crc kubenswrapper[4742]: E1205 05:52:51.383360 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:52:51 crc kubenswrapper[4742]: E1205 05:52:51.383991 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:51 crc kubenswrapper[4742]: E1205 05:52:51.384213 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
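[editor's sketch] Every sync failure above has the same root cause: the kubelet finds no CNI network configuration under /etc/kubernetes/cni/net.d/, so it reports NetworkReady=false until the network provider (on OpenShift, typically OVN-Kubernetes via multus) writes one. A minimal illustrative check, not part of this log; the file name and the glob patterns (the usual libcni conventions) are assumptions:

// cnicheck.go - illustrative sketch: list the CNI config files the kubelet
// is waiting for in the directory named by the log message above.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path taken from the log message
	var found []string
	// Conventional CNI config extensions; an assumption, not from the log.
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(dir, pattern))
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(2)
		}
		found = append(found, matches...)
	}
	if len(found) == 0 {
		// This is the state the kubelet is reporting: no config yet,
		// hence NetworkReady=false and the skipped pod syncs.
		fmt.Println("no CNI configuration files found in", dir)
		os.Exit(1)
	}
	for _, f := range found {
		fmt.Println(f)
	}
}

If this prints nothing on the node, the sandbox-less pods above cannot be started, which matches the "Error syncing pod, skipping" entries.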
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.384842 4742 scope.go:117] "RemoveContainer" containerID="c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.403918 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.427809 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.443924 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.452449 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.452487 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.452499 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:51 crc 
kubenswrapper[4742]: I1205 05:52:51.452517 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.452531 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:51Z","lastTransitionTime":"2025-12-05T05:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.463613 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env
-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.480963 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.519847 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469a
b57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.542988 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d336
70d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.555379 4742 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.555422 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.555435 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.555452 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.555465 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:51Z","lastTransitionTime":"2025-12-05T05:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.565240 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.581794 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.606193 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.625641 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.644105 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.661646 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.661707 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.661725 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.661749 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.661767 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:51Z","lastTransitionTime":"2025-12-05T05:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.663343 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.682603 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.700211 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.719523 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.738406 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"lumn:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.307762 6146 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1205 05:52:35.308091 6146 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1205 05:52:35.307897 6146 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.308134 6146 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:51Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.764561 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.764627 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.764648 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.764676 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.764696 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:51Z","lastTransitionTime":"2025-12-05T05:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.867614 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.867685 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.867705 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.867730 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.867751 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:51Z","lastTransitionTime":"2025-12-05T05:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.970995 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.971067 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.971122 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.971152 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:51 crc kubenswrapper[4742]: I1205 05:52:51.971174 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:51Z","lastTransitionTime":"2025-12-05T05:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.019098 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/1.log" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.023000 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerStarted","Data":"40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5"} Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.023633 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.046471 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.062584 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.073196 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.073235 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.073245 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.073259 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.073269 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:52Z","lastTransitionTime":"2025-12-05T05:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.082687 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.093472 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.106076 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 
05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.124927 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.137626 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.154306 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.166158 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.175643 4742 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.175682 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.175692 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.175728 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.175739 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:52Z","lastTransitionTime":"2025-12-05T05:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.181785 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.201892 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.214987 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.226541 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.242525 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac
9737c949eabfcd821200ceb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"lumn:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.307762 6146 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1205 05:52:35.308091 6146 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1205 05:52:35.307897 6146 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.308134 6146 default_network_controller.go:776] Recording success event on pod 
openshift-network-node-identity/network-no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}]
,\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.252033 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.263660 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.277616 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.277648 4742 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.277657 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.277670 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.277679 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:52Z","lastTransitionTime":"2025-12-05T05:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.280728 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"t
erminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://
3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPat
h\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:52Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.380809 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.380846 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.380856 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.380872 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.380885 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:52Z","lastTransitionTime":"2025-12-05T05:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.483923 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.483972 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.483988 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.484009 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.484026 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:52Z","lastTransitionTime":"2025-12-05T05:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.587049 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.587141 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.587161 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.587187 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.587206 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:52Z","lastTransitionTime":"2025-12-05T05:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.691506 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.691581 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.691620 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.691665 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.691690 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:52Z","lastTransitionTime":"2025-12-05T05:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.795616 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.795675 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.795692 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.795716 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.795733 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:52Z","lastTransitionTime":"2025-12-05T05:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.899881 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.899946 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.899963 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.900466 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:52 crc kubenswrapper[4742]: I1205 05:52:52.900530 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:52Z","lastTransitionTime":"2025-12-05T05:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.003730 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.003784 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.003829 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.003845 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.003857 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:53Z","lastTransitionTime":"2025-12-05T05:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.030503 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/2.log" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.031663 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/1.log" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.036291 4742 generic.go:334] "Generic (PLEG): container finished" podID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerID="40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5" exitCode=1 Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.036379 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerDied","Data":"40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5"} Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.036469 4742 scope.go:117] "RemoveContainer" containerID="c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.037496 4742 scope.go:117] "RemoveContainer" containerID="40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5" Dec 05 05:52:53 crc kubenswrapper[4742]: E1205 05:52:53.037771 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\"" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.058863 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.075852 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.093968 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.107564 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.107617 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.107634 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.107657 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.107673 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:53Z","lastTransitionTime":"2025-12-05T05:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.114256 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.130046 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.147223 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.167175 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.189453 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.210222 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.210275 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:53 crc 
kubenswrapper[4742]: I1205 05:52:53.210287 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.210309 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.210322 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:53Z","lastTransitionTime":"2025-12-05T05:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.219393 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac
9737c949eabfcd821200ceb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c6e54b12a8993b397af3908cc8042f2907d863bb09c092d247bff3383534d624\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"message\\\":\\\"lumn:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.307762 6146 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in node crc\\\\nI1205 05:52:35.308091 6146 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1205 05:52:35.307897 6146 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-image-registry/image-registry]} name:Service_openshift-image-registry/image-registry_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.93:5000:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {83c1e277-3d22-42ae-a355-f7a0ff0bd171}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:35.308134 6146 default_network_controller.go:776] Recording success event on pod openshift-network-node-identity/network-no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:34Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:52Z\\\",\\\"message\\\":\\\"94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433832 6371 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433778 6371 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433851 6371 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-ttdt8 in node crc\\\\nI1205 05:52:52.433864 6371 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-ttdt8 after 0 failed attempt(s)\\\\nI1205 
05:52:52.433890 6371 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433835 6371 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/mac\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b
82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.234025 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.249687 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.261970 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.276229 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 
05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.288863 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.312499 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469a
b57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.313904 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.313978 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.314023 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.314099 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.314126 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:53Z","lastTransitionTime":"2025-12-05T05:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.329593 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.345104 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:53Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.382818 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.382856 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.382901 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.382960 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:53 crc kubenswrapper[4742]: E1205 05:52:53.383131 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:53 crc kubenswrapper[4742]: E1205 05:52:53.383335 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:53 crc kubenswrapper[4742]: E1205 05:52:53.383377 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:52:53 crc kubenswrapper[4742]: E1205 05:52:53.383488 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.418160 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.418215 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.418232 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.418260 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.418278 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:53Z","lastTransitionTime":"2025-12-05T05:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.520652 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.520725 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.520744 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.520767 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.520781 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:53Z","lastTransitionTime":"2025-12-05T05:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.624110 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.624174 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.624192 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.624216 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.624236 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:53Z","lastTransitionTime":"2025-12-05T05:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.727775 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.727843 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.727867 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.727896 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.727916 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:53Z","lastTransitionTime":"2025-12-05T05:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.787776 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs\") pod \"network-metrics-daemon-pbtb4\" (UID: \"b69352e1-2d48-4211-83e1-25d09fff9d3c\") " pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:52:53 crc kubenswrapper[4742]: E1205 05:52:53.788003 4742 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 05:52:53 crc kubenswrapper[4742]: E1205 05:52:53.788138 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs podName:b69352e1-2d48-4211-83e1-25d09fff9d3c nodeName:}" failed. No retries permitted until 2025-12-05 05:53:09.788102645 +0000 UTC m=+65.700237747 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs") pod "network-metrics-daemon-pbtb4" (UID: "b69352e1-2d48-4211-83e1-25d09fff9d3c") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.831929 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.831994 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.832013 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.832038 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.832085 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:53Z","lastTransitionTime":"2025-12-05T05:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.935932 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.935990 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.936007 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.936030 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:53 crc kubenswrapper[4742]: I1205 05:52:53.936047 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:53Z","lastTransitionTime":"2025-12-05T05:52:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.039957 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.040004 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.040024 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.040052 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.040124 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:54Z","lastTransitionTime":"2025-12-05T05:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.049050 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/2.log"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.056265 4742 scope.go:117] "RemoveContainer" containerID="40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5"
Dec 05 05:52:54 crc kubenswrapper[4742]: E1205 05:52:54.056591 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\"" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.076678 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.097219 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.115940 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.133131 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.143653 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.143702 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.143719 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.143742 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.143760 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:54Z","lastTransitionTime":"2025-12-05T05:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.153780 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.172552 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.193612 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.218256 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.247121 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.247163 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.247175 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.247194 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.247204 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:54Z","lastTransitionTime":"2025-12-05T05:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.250415 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:52Z\\\",\\\"message\\\":\\\"94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433832 6371 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433778 6371 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433851 6371 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-ttdt8 in node crc\\\\nI1205 05:52:52.433864 6371 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-ttdt8 after 0 failed attempt(s)\\\\nI1205 05:52:52.433890 6371 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433835 6371 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/mac\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.270984 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.286518 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.311524 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.330238 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.347193 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.349558 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.349598 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.349610 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.349628 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.349643 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:54Z","lastTransitionTime":"2025-12-05T05:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.371258 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-0
5T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700
f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.388036 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.406181 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.419353 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.438710 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.452476 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.452535 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.452552 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.452574 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.452591 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:54Z","lastTransitionTime":"2025-12-05T05:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.455872 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.475698 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.494679 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.526445 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469a
b57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.549502 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d336
70d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.555110 4742 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.555192 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.555222 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.555258 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.555283 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:54Z","lastTransitionTime":"2025-12-05T05:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.568767 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.587825 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.606226 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.622787 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.638121 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.656272 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.657749 4742 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.657805 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.657820 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.657838 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.657851 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:54Z","lastTransitionTime":"2025-12-05T05:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.672612 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.693236 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.716201 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.748222 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:52Z\\\",\\\"message\\\":\\\"94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433832 6371 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433778 6371 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433851 6371 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-ttdt8 in node crc\\\\nI1205 05:52:52.433864 6371 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-ttdt8 after 0 failed attempt(s)\\\\nI1205 05:52:52.433890 6371 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433835 6371 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/mac\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.760758 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.760794 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.760806 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.760821 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.760833 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:54Z","lastTransitionTime":"2025-12-05T05:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.863916 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.863989 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.864009 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.864118 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.864153 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:54Z","lastTransitionTime":"2025-12-05T05:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.967532 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.967578 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.967595 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.967617 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:54 crc kubenswrapper[4742]: I1205 05:52:54.967636 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:54Z","lastTransitionTime":"2025-12-05T05:52:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.070421 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.070459 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.070469 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.070486 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.070498 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:55Z","lastTransitionTime":"2025-12-05T05:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.174307 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.174378 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.174402 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.174431 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.174455 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:55Z","lastTransitionTime":"2025-12-05T05:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.205541 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.205816 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:53:27.205797645 +0000 UTC m=+83.117932707 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.277262 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.277311 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.277364 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.277417 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.277436 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:55Z","lastTransitionTime":"2025-12-05T05:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.307095 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.307163 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.307198 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.307226 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.307333 4742 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.307387 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:53:27.307371316 +0000 UTC m=+83.219506388 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.307530 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.307556 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.307567 4742 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.307580 4742 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.307621 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:53:27.307605042 +0000 UTC m=+83.219740104 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.307665 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:53:27.307642353 +0000 UTC m=+83.219777425 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.307588 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.307697 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.307712 4742 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.307759 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:53:27.307750626 +0000 UTC m=+83.219885698 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.379350 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.379414 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.379431 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.379456 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.379476 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:55Z","lastTransitionTime":"2025-12-05T05:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.382498 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.382570 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.382605 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.382655 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.382707 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.382827 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.382930 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:55 crc kubenswrapper[4742]: E1205 05:52:55.383018 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.482337 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.482367 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.482379 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.482394 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.482406 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:55Z","lastTransitionTime":"2025-12-05T05:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.488497 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.496946 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.504044 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\
\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.520906 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont
/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.536859 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the 
pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.555654 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountP
ath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.572473 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.585094 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.585139 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.585158 4742 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.585182 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.585199 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:55Z","lastTransitionTime":"2025-12-05T05:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.592753 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac
9737c949eabfcd821200ceb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:52Z\\\",\\\"message\\\":\\\"94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433832 6371 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433778 6371 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433851 6371 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-ttdt8 in node crc\\\\nI1205 05:52:52.433864 6371 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-ttdt8 after 0 failed attempt(s)\\\\nI1205 05:52:52.433890 6371 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433835 6371 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/mac\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.611406 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.628659 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.643348 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.658002 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.671649 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.685693 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.688413 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.688491 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.688517 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.688548 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.688573 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:55Z","lastTransitionTime":"2025-12-05T05:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.701471 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.714979 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.736003 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"
resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\"
:\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.752793 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.768573 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:55Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.796489 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.796551 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.796568 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.796589 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.796604 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:55Z","lastTransitionTime":"2025-12-05T05:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.900220 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.900295 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.900319 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.900349 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:55 crc kubenswrapper[4742]: I1205 05:52:55.900370 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:55Z","lastTransitionTime":"2025-12-05T05:52:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.003360 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.003416 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.003434 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.003456 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.003472 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:56Z","lastTransitionTime":"2025-12-05T05:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.105876 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.105925 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.105937 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.105953 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.105965 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:56Z","lastTransitionTime":"2025-12-05T05:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.208810 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.208857 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.208868 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.208884 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.208893 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:56Z","lastTransitionTime":"2025-12-05T05:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.312216 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.312262 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.312275 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.312294 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.312306 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:56Z","lastTransitionTime":"2025-12-05T05:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.415044 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.415143 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.415162 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.415188 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.415208 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:56Z","lastTransitionTime":"2025-12-05T05:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.518006 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.518096 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.518113 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.518139 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.518154 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:56Z","lastTransitionTime":"2025-12-05T05:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.621477 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.621554 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.621579 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.621612 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.621640 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:56Z","lastTransitionTime":"2025-12-05T05:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.724252 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.724334 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.724360 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.724391 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.724418 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:56Z","lastTransitionTime":"2025-12-05T05:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.823533 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.823571 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.823582 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.823597 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.823608 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:56Z","lastTransitionTime":"2025-12-05T05:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:56 crc kubenswrapper[4742]: E1205 05:52:56.842032 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.846809 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.846906 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.846931 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.846961 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.846983 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:56Z","lastTransitionTime":"2025-12-05T05:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:56 crc kubenswrapper[4742]: E1205 05:52:56.867404 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.872251 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.872296 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.872310 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.872326 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.872336 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:56Z","lastTransitionTime":"2025-12-05T05:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:56 crc kubenswrapper[4742]: E1205 05:52:56.908132 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.913105 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.913144 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.913159 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.913180 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.913195 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:56Z","lastTransitionTime":"2025-12-05T05:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:56 crc kubenswrapper[4742]: E1205 05:52:56.937050 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.942154 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.942194 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.942205 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.942226 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.942239 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:56Z","lastTransitionTime":"2025-12-05T05:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:56 crc kubenswrapper[4742]: E1205 05:52:56.958666 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:52:56Z is after 2025-08-24T17:21:41Z" Dec 05 05:52:56 crc kubenswrapper[4742]: E1205 05:52:56.958778 4742 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.960166 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
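Both failed patch attempts above share one root cause: the node.network-node-identity.openshift.io webhook on 127.0.0.1:9743 presents a TLS certificate whose notAfter (2025-08-24T17:21:41Z) is months before the time the kubelet reports (2025-12-05T05:52:56Z). A minimal sketch of how one could confirm the expiry from the node, assuming Python 3 and the third-party cryptography package; only the host and port come from the log, the script itself is a hypothetical helper:

    # check_webhook_cert.py - print the validity window of the certificate
    # served by the webhook endpoint named in the error above (illustrative
    # helper, not part of kubelet or OpenShift tooling).
    import ssl
    from cryptography import x509  # pip install cryptography

    # get_server_certificate() does not verify the peer, so it still
    # succeeds when the served certificate is already expired.
    pem = ssl.get_server_certificate(("127.0.0.1", 9743))
    cert = x509.load_pem_x509_certificate(pem.encode())
    print("notBefore:", cert.not_valid_before_utc)  # cryptography >= 42
    print("notAfter: ", cert.not_valid_after_utc)   # older versions: not_valid_after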
event="NodeHasSufficientMemory" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.960193 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.960201 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.960212 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:56 crc kubenswrapper[4742]: I1205 05:52:56.960222 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:56Z","lastTransitionTime":"2025-12-05T05:52:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.062436 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.062481 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.062495 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.062514 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.062528 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:57Z","lastTransitionTime":"2025-12-05T05:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.167624 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.167678 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.167693 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.167728 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.167742 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:57Z","lastTransitionTime":"2025-12-05T05:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.271013 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.271530 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.271754 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.271952 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.272170 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:57Z","lastTransitionTime":"2025-12-05T05:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.375844 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.376131 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.376163 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.376193 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.376213 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:57Z","lastTransitionTime":"2025-12-05T05:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.382390 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.382424 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.382432 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.382547 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:57 crc kubenswrapper[4742]: E1205 05:52:57.382576 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:57 crc kubenswrapper[4742]: E1205 05:52:57.382673 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:57 crc kubenswrapper[4742]: E1205 05:52:57.382773 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:52:57 crc kubenswrapper[4742]: E1205 05:52:57.382854 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.478778 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.478855 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.478873 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.478897 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.478916 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:57Z","lastTransitionTime":"2025-12-05T05:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.582184 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.582252 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.582269 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.582293 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.582310 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:57Z","lastTransitionTime":"2025-12-05T05:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.686038 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.686187 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.686223 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.686251 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.686270 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:57Z","lastTransitionTime":"2025-12-05T05:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.789229 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.789297 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.789318 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.789345 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.789367 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:57Z","lastTransitionTime":"2025-12-05T05:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.893098 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.893163 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.893179 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.893204 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.893221 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:57Z","lastTransitionTime":"2025-12-05T05:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.996037 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.996146 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.996171 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.996204 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:57 crc kubenswrapper[4742]: I1205 05:52:57.996241 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:57Z","lastTransitionTime":"2025-12-05T05:52:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.099028 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.099125 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.099148 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.099171 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.099188 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:58Z","lastTransitionTime":"2025-12-05T05:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.202292 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.202333 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.202344 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.202361 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.202372 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:58Z","lastTransitionTime":"2025-12-05T05:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.305172 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.305204 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.305212 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.305224 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.305233 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:58Z","lastTransitionTime":"2025-12-05T05:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.408004 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.408097 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.408117 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.408141 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.408158 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:58Z","lastTransitionTime":"2025-12-05T05:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.510511 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.510559 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.510576 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.510598 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.510615 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:58Z","lastTransitionTime":"2025-12-05T05:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.613173 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.613237 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.613253 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.613276 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.613293 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:58Z","lastTransitionTime":"2025-12-05T05:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.715771 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.715812 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.715821 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.715833 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.715842 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:58Z","lastTransitionTime":"2025-12-05T05:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.818000 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.818047 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.818068 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.818084 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.818093 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:58Z","lastTransitionTime":"2025-12-05T05:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.921211 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.921281 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.921297 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.921321 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:58 crc kubenswrapper[4742]: I1205 05:52:58.921339 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:58Z","lastTransitionTime":"2025-12-05T05:52:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.023326 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.023378 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.023392 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.023407 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.023418 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:59Z","lastTransitionTime":"2025-12-05T05:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.126235 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.126307 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.126317 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.126331 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.126341 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:59Z","lastTransitionTime":"2025-12-05T05:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.229753 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.229829 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.229849 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.229877 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.229897 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:59Z","lastTransitionTime":"2025-12-05T05:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.332782 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.332838 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.332851 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.332869 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.332882 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:59Z","lastTransitionTime":"2025-12-05T05:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.382380 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.382482 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.382386 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:52:59 crc kubenswrapper[4742]: E1205 05:52:59.382549 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.382412 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:52:59 crc kubenswrapper[4742]: E1205 05:52:59.382631 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:52:59 crc kubenswrapper[4742]: E1205 05:52:59.382781 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:52:59 crc kubenswrapper[4742]: E1205 05:52:59.382906 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.436132 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.436196 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.436220 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.436248 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.436267 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:59Z","lastTransitionTime":"2025-12-05T05:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.539598 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.539671 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.539687 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.539712 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.539730 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:59Z","lastTransitionTime":"2025-12-05T05:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.642942 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.643004 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.643024 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.643050 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.643100 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:59Z","lastTransitionTime":"2025-12-05T05:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.746248 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.746311 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.746336 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.746367 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.746389 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:59Z","lastTransitionTime":"2025-12-05T05:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.853239 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.853516 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.854446 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.854504 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.854528 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:59Z","lastTransitionTime":"2025-12-05T05:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.957963 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.958027 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.958046 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.958118 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:52:59 crc kubenswrapper[4742]: I1205 05:52:59.958138 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:52:59Z","lastTransitionTime":"2025-12-05T05:52:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.061381 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.061435 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.061452 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.061475 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.061493 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:00Z","lastTransitionTime":"2025-12-05T05:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.164121 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.164194 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.164212 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.164250 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.164290 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:00Z","lastTransitionTime":"2025-12-05T05:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.267575 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.267647 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.267668 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.267692 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.267710 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:00Z","lastTransitionTime":"2025-12-05T05:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.371035 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.371148 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.371166 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.371189 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.371206 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:00Z","lastTransitionTime":"2025-12-05T05:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.474354 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.474418 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.474436 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.474463 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.474481 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:00Z","lastTransitionTime":"2025-12-05T05:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.578145 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.578245 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.578284 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.578320 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.578342 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:00Z","lastTransitionTime":"2025-12-05T05:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.681680 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.681740 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.681758 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.681782 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.681801 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:00Z","lastTransitionTime":"2025-12-05T05:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.785401 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.785521 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.785544 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.785989 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.786226 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:00Z","lastTransitionTime":"2025-12-05T05:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.889308 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.889349 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.889361 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.889377 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.889389 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:00Z","lastTransitionTime":"2025-12-05T05:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.991554 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.991590 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.991598 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.991611 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:00 crc kubenswrapper[4742]: I1205 05:53:00.991620 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:00Z","lastTransitionTime":"2025-12-05T05:53:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.094207 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.094265 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.094281 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.094302 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.094321 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:01Z","lastTransitionTime":"2025-12-05T05:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.197118 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.197189 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.197211 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.197237 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.197257 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:01Z","lastTransitionTime":"2025-12-05T05:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.300541 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.300612 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.300629 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.300653 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.300671 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:01Z","lastTransitionTime":"2025-12-05T05:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.381859 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.381912 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.381971 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:01 crc kubenswrapper[4742]: E1205 05:53:01.382017 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.381860 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:01 crc kubenswrapper[4742]: E1205 05:53:01.382173 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:01 crc kubenswrapper[4742]: E1205 05:53:01.382233 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:01 crc kubenswrapper[4742]: E1205 05:53:01.382472 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.404204 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.404278 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.404304 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.404332 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.404355 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:01Z","lastTransitionTime":"2025-12-05T05:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.506560 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.506614 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.506630 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.506651 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.506667 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:01Z","lastTransitionTime":"2025-12-05T05:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.609861 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.609916 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.609933 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.609958 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.609975 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:01Z","lastTransitionTime":"2025-12-05T05:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.712941 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.713032 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.713049 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.713096 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.713115 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:01Z","lastTransitionTime":"2025-12-05T05:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.815799 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.815894 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.815920 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.815964 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.815993 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:01Z","lastTransitionTime":"2025-12-05T05:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.919880 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.920041 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.920112 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.920144 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:01 crc kubenswrapper[4742]: I1205 05:53:01.920170 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:01Z","lastTransitionTime":"2025-12-05T05:53:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.023801 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.023877 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.023913 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.023948 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.023974 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:02Z","lastTransitionTime":"2025-12-05T05:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.128046 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.128169 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.128212 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.128249 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.128273 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:02Z","lastTransitionTime":"2025-12-05T05:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.232425 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.233116 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.233306 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.233496 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.233688 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:02Z","lastTransitionTime":"2025-12-05T05:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.337720 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.338207 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.338491 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.338589 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.338670 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:02Z","lastTransitionTime":"2025-12-05T05:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.442752 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.442832 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.442852 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.442878 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.442897 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:02Z","lastTransitionTime":"2025-12-05T05:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.545920 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.545987 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.546007 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.546034 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.546052 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:02Z","lastTransitionTime":"2025-12-05T05:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.648645 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.648699 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.648716 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.648739 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.648757 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:02Z","lastTransitionTime":"2025-12-05T05:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.751891 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.751975 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.752011 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.752041 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.752101 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:02Z","lastTransitionTime":"2025-12-05T05:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.855731 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.855809 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.855831 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.855864 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.855883 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:02Z","lastTransitionTime":"2025-12-05T05:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.958782 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.958814 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.958822 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.958836 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:02 crc kubenswrapper[4742]: I1205 05:53:02.958845 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:02Z","lastTransitionTime":"2025-12-05T05:53:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.061974 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.062046 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.062097 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.062126 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.062147 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:03Z","lastTransitionTime":"2025-12-05T05:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.165424 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.165487 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.165503 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.165557 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.165575 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:03Z","lastTransitionTime":"2025-12-05T05:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.268624 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.268696 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.268711 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.268729 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.268745 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:03Z","lastTransitionTime":"2025-12-05T05:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.372169 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.372231 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.372283 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.372312 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.372334 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:03Z","lastTransitionTime":"2025-12-05T05:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.382803 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.382866 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.382909 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:03 crc kubenswrapper[4742]: E1205 05:53:03.382956 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.382820 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:03 crc kubenswrapper[4742]: E1205 05:53:03.383316 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:03 crc kubenswrapper[4742]: E1205 05:53:03.383446 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:03 crc kubenswrapper[4742]: E1205 05:53:03.383552 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.474782 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.474857 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.474882 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.474910 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.474932 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:03Z","lastTransitionTime":"2025-12-05T05:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.577016 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.577089 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.577101 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.577118 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.577130 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:03Z","lastTransitionTime":"2025-12-05T05:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.679838 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.679901 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.679918 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.679941 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.679958 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:03Z","lastTransitionTime":"2025-12-05T05:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.783340 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.783399 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.783414 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.783438 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.783450 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:03Z","lastTransitionTime":"2025-12-05T05:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.886211 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.886260 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.886270 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.886284 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.886293 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:03Z","lastTransitionTime":"2025-12-05T05:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.989110 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.989169 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.989186 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.989211 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:03 crc kubenswrapper[4742]: I1205 05:53:03.989229 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:03Z","lastTransitionTime":"2025-12-05T05:53:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.091803 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.091849 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.091861 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.091877 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.091889 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:04Z","lastTransitionTime":"2025-12-05T05:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.195154 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.195199 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.195218 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.195245 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.195266 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:04Z","lastTransitionTime":"2025-12-05T05:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.298475 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.298513 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.298527 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.298544 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.298557 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:04Z","lastTransitionTime":"2025-12-05T05:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.399021 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6fb3a766-b56c-4b6c-a09e-3666df4accb8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f54b5f75e10a5fd9f43eec7433614b23bef72beb32ff9028155852c09d9b2e7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c03a8d713a4b0a06c47eb28b20328d66f0f3475b56a16ded6f429dd6648e13a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-sched
uler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13e953bfbd6033682f3959815eedf4f814d275be3391564618723c3491faee3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.406223 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.406288 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.406302 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.406321 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.406643 4742 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:04Z","lastTransitionTime":"2025-12-05T05:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.420341 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.440375 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.462684 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.487171 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:52Z\\\",\\\"message\\\":\\\"94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433832 6371 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433778 6371 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433851 6371 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-ttdt8 in node crc\\\\nI1205 05:52:52.433864 6371 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-ttdt8 after 0 failed attempt(s)\\\\nI1205 05:52:52.433890 6371 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433835 6371 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/mac\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.506970 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.509385 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.509531 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.509622 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.509869 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.509967 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:04Z","lastTransitionTime":"2025-12-05T05:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.527023 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.541659 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.556684 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 
05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.570844 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.602564 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469a
b57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.615288 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.615351 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.615368 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.615391 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.615409 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:04Z","lastTransitionTime":"2025-12-05T05:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.627918 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.644701 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.663896 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.678713 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.697205 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.714469 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.719800 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.719864 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.719888 4742 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.719917 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.719942 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:04Z","lastTransitionTime":"2025-12-05T05:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.732267 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\
\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:04Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.823822 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.824825 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.824950 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.825137 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.825258 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:04Z","lastTransitionTime":"2025-12-05T05:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.928422 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.928470 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.928487 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.928511 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:04 crc kubenswrapper[4742]: I1205 05:53:04.928551 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:04Z","lastTransitionTime":"2025-12-05T05:53:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.030997 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.031046 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.031099 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.031122 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.031139 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:05Z","lastTransitionTime":"2025-12-05T05:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.133994 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.134036 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.134048 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.134084 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.134098 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:05Z","lastTransitionTime":"2025-12-05T05:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.236826 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.236877 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.236894 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.236917 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.236935 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:05Z","lastTransitionTime":"2025-12-05T05:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.339904 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.339971 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.339989 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.340013 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.340031 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:05Z","lastTransitionTime":"2025-12-05T05:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.381707 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:05 crc kubenswrapper[4742]: E1205 05:53:05.381829 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.381726 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.381878 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.381912 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:05 crc kubenswrapper[4742]: E1205 05:53:05.381918 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:05 crc kubenswrapper[4742]: E1205 05:53:05.382088 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:05 crc kubenswrapper[4742]: E1205 05:53:05.382160 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.443449 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.443518 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.443541 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.443572 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.443595 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:05Z","lastTransitionTime":"2025-12-05T05:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.546392 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.546438 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.546451 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.546467 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.546479 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:05Z","lastTransitionTime":"2025-12-05T05:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.649460 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.649540 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.649564 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.649593 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.649615 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:05Z","lastTransitionTime":"2025-12-05T05:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.752343 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.752464 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.752489 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.752515 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.752534 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:05Z","lastTransitionTime":"2025-12-05T05:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.855146 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.855221 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.855235 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.855252 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.855264 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:05Z","lastTransitionTime":"2025-12-05T05:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.958718 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.958793 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.958814 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.958843 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:05 crc kubenswrapper[4742]: I1205 05:53:05.958868 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:05Z","lastTransitionTime":"2025-12-05T05:53:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.062089 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.062151 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.062168 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.062191 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.062213 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:06Z","lastTransitionTime":"2025-12-05T05:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.164932 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.165021 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.165050 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.165121 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.165147 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:06Z","lastTransitionTime":"2025-12-05T05:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.268835 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.268897 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.268920 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.268949 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.268969 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:06Z","lastTransitionTime":"2025-12-05T05:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.372338 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.372422 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.372444 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.372849 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.372880 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:06Z","lastTransitionTime":"2025-12-05T05:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.384107 4742 scope.go:117] "RemoveContainer" containerID="40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5"
Dec 05 05:53:06 crc kubenswrapper[4742]: E1205 05:53:06.384878 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\"" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.475545 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.475575 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.475584 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.475596 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.475604 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:06Z","lastTransitionTime":"2025-12-05T05:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.579230 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.579291 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.579313 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.579340 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.579361 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:06Z","lastTransitionTime":"2025-12-05T05:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.682487 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.682553 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.682574 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.682600 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.682619 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:06Z","lastTransitionTime":"2025-12-05T05:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.784954 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.785005 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.785018 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.785031 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.785044 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:06Z","lastTransitionTime":"2025-12-05T05:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.887357 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.887385 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.887396 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.887412 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.887422 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:06Z","lastTransitionTime":"2025-12-05T05:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.990128 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.990200 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.990212 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.990247 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:06 crc kubenswrapper[4742]: I1205 05:53:06.990260 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:06Z","lastTransitionTime":"2025-12-05T05:53:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.092959 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.092996 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.093007 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.093022 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.093033 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:07Z","lastTransitionTime":"2025-12-05T05:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.197659 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.197705 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.197720 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.197737 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.197749 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:07Z","lastTransitionTime":"2025-12-05T05:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.301028 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.301197 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.301222 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.301252 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.301272 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:07Z","lastTransitionTime":"2025-12-05T05:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.359521 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.359562 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.359573 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.359588 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.359597 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:07Z","lastTransitionTime":"2025-12-05T05:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:53:07 crc kubenswrapper[4742]: E1205 05:53:07.373941 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.377642 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.377677 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.377688 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.377702 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.377711 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:07Z","lastTransitionTime":"2025-12-05T05:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.381898 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.381911 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.381943 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:07 crc kubenswrapper[4742]: E1205 05:53:07.381992 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.382210 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:07 crc kubenswrapper[4742]: E1205 05:53:07.382216 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:07 crc kubenswrapper[4742]: E1205 05:53:07.382636 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:07 crc kubenswrapper[4742]: E1205 05:53:07.382468 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:07 crc kubenswrapper[4742]: E1205 05:53:07.391618 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.395716 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.395738 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.395746 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.395759 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.395769 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:07Z","lastTransitionTime":"2025-12-05T05:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:07 crc kubenswrapper[4742]: E1205 05:53:07.405794 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.409099 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.409135 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.409144 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.409156 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.409165 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:07Z","lastTransitionTime":"2025-12-05T05:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:07 crc kubenswrapper[4742]: E1205 05:53:07.424821 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.427749 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.427810 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.427831 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.427855 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.427874 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:07Z","lastTransitionTime":"2025-12-05T05:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:07 crc kubenswrapper[4742]: E1205 05:53:07.444217 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:07Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:07 crc kubenswrapper[4742]: E1205 05:53:07.444437 4742 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.445730 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.445791 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.445816 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.445844 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.445869 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:07Z","lastTransitionTime":"2025-12-05T05:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.549168 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.549231 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.549247 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.549269 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.549287 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:07Z","lastTransitionTime":"2025-12-05T05:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.652723 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.652772 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.652783 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.652799 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.652811 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:07Z","lastTransitionTime":"2025-12-05T05:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.755029 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.755096 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.755108 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.755122 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.755133 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:07Z","lastTransitionTime":"2025-12-05T05:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.858120 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.858371 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.858451 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.858538 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.858621 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:07Z","lastTransitionTime":"2025-12-05T05:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.961118 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.961721 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.961843 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.961953 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:07 crc kubenswrapper[4742]: I1205 05:53:07.962110 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:07Z","lastTransitionTime":"2025-12-05T05:53:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.064591 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.064654 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.064673 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.064701 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.064719 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:08Z","lastTransitionTime":"2025-12-05T05:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.166870 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.167174 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.167258 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.167409 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.167547 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:08Z","lastTransitionTime":"2025-12-05T05:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.270019 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.270068 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.270077 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.270090 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.270098 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:08Z","lastTransitionTime":"2025-12-05T05:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.371815 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.371876 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.371893 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.371915 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.371930 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:08Z","lastTransitionTime":"2025-12-05T05:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.394413 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.474895 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.475159 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.475278 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.475393 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.475507 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:08Z","lastTransitionTime":"2025-12-05T05:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.577963 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.578005 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.578016 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.578030 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.578041 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:08Z","lastTransitionTime":"2025-12-05T05:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.681940 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.682473 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.682639 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.682810 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.683027 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:08Z","lastTransitionTime":"2025-12-05T05:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.785970 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.786320 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.786436 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.786575 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.786694 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:08Z","lastTransitionTime":"2025-12-05T05:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.889771 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.889816 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.889827 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.889842 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.889852 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:08Z","lastTransitionTime":"2025-12-05T05:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.992495 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.992565 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.992584 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.992614 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:08 crc kubenswrapper[4742]: I1205 05:53:08.992633 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:08Z","lastTransitionTime":"2025-12-05T05:53:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.094624 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.094669 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.094680 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.094699 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.094711 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:09Z","lastTransitionTime":"2025-12-05T05:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.198402 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.198452 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.198469 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.198493 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.198510 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:09Z","lastTransitionTime":"2025-12-05T05:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.301539 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.301580 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.301590 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.301603 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.301613 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:09Z","lastTransitionTime":"2025-12-05T05:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.381734 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.381762 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:09 crc kubenswrapper[4742]: E1205 05:53:09.381849 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.381922 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:09 crc kubenswrapper[4742]: E1205 05:53:09.381992 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.382028 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:09 crc kubenswrapper[4742]: E1205 05:53:09.382105 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:09 crc kubenswrapper[4742]: E1205 05:53:09.382189 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.403652 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.403686 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.403694 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.403707 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.403718 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:09Z","lastTransitionTime":"2025-12-05T05:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.506180 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.506271 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.506293 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.506315 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.506332 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:09Z","lastTransitionTime":"2025-12-05T05:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.609105 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.609168 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.609225 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.609248 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.609264 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:09Z","lastTransitionTime":"2025-12-05T05:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.711404 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.711441 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.711452 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.711467 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.711479 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:09Z","lastTransitionTime":"2025-12-05T05:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.813637 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.813672 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.813684 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.813696 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.813705 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:09Z","lastTransitionTime":"2025-12-05T05:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.855670 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs\") pod \"network-metrics-daemon-pbtb4\" (UID: \"b69352e1-2d48-4211-83e1-25d09fff9d3c\") " pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:09 crc kubenswrapper[4742]: E1205 05:53:09.855887 4742 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:53:09 crc kubenswrapper[4742]: E1205 05:53:09.855964 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs podName:b69352e1-2d48-4211-83e1-25d09fff9d3c nodeName:}" failed. No retries permitted until 2025-12-05 05:53:41.855939162 +0000 UTC m=+97.768074264 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs") pod "network-metrics-daemon-pbtb4" (UID: "b69352e1-2d48-4211-83e1-25d09fff9d3c") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.921389 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.921438 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.921450 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.921467 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:09 crc kubenswrapper[4742]: I1205 05:53:09.921478 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:09Z","lastTransitionTime":"2025-12-05T05:53:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.024876 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.024937 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.024955 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.024980 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.024996 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:10Z","lastTransitionTime":"2025-12-05T05:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.128093 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.128181 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.128198 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.128221 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.128238 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:10Z","lastTransitionTime":"2025-12-05T05:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.230996 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.231219 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.231311 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.231405 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.231497 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:10Z","lastTransitionTime":"2025-12-05T05:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.334262 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.334669 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.334791 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.334872 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.334956 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:10Z","lastTransitionTime":"2025-12-05T05:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.436863 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.436899 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.436909 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.436925 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.436937 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:10Z","lastTransitionTime":"2025-12-05T05:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.539625 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.539688 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.539705 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.539729 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.539746 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:10Z","lastTransitionTime":"2025-12-05T05:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.641556 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.641632 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.641645 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.641661 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.641673 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:10Z","lastTransitionTime":"2025-12-05T05:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.743849 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.743911 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.743930 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.743955 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.743972 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:10Z","lastTransitionTime":"2025-12-05T05:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.846696 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.846735 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.846748 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.846764 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.846778 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:10Z","lastTransitionTime":"2025-12-05T05:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.950399 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.950500 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.950519 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.950544 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:10 crc kubenswrapper[4742]: I1205 05:53:10.950562 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:10Z","lastTransitionTime":"2025-12-05T05:53:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.053468 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.053536 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.053549 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.053571 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.053583 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:11Z","lastTransitionTime":"2025-12-05T05:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.108473 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-776bt_39641a18-5d13-441f-9956-3777b9f27703/kube-multus/0.log" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.108530 4742 generic.go:334] "Generic (PLEG): container finished" podID="39641a18-5d13-441f-9956-3777b9f27703" containerID="0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8" exitCode=1 Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.108558 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-776bt" event={"ID":"39641a18-5d13-441f-9956-3777b9f27703","Type":"ContainerDied","Data":"0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8"} Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.108900 4742 scope.go:117] "RemoveContainer" containerID="0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.130710 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.156190 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.156350 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:11 crc 
kubenswrapper[4742]: I1205 05:53:11.156412 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.156482 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.156562 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:11Z","lastTransitionTime":"2025-12-05T05:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.160677 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac
9737c949eabfcd821200ceb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:52Z\\\",\\\"message\\\":\\\"94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433832 6371 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433778 6371 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433851 6371 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-ttdt8 in node crc\\\\nI1205 05:52:52.433864 6371 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-ttdt8 after 0 failed attempt(s)\\\\nI1205 05:52:52.433890 6371 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433835 6371 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/mac\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.175727 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6fb3a766-b56c-4b6c-a09e-3666df4accb8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f54b5f75e10a5fd9f43eec7433614b23bef72beb32ff9028155852c09d9b2e7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c03a8d713a4b0a06c47eb28b20328d66f0f3475b56a16ded6f429dd6648e13a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13e953bfbd6033682f3959815eedf4f814d275be3391564618723c3491faee3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.198087 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.219353 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.234561 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 
05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.247384 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.257552 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.259201 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.259244 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.259255 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.259271 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.259282 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:11Z","lastTransitionTime":"2025-12-05T05:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.271037 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:11Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:10Z\\\",\\\"message\\\":\\\"2025-12-05T05:52:24+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5\\\\n2025-12-05T05:52:24+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5 to /host/opt/cni/bin/\\\\n2025-12-05T05:52:25Z [verbose] multus-daemon started\\\\n2025-12-05T05:52:25Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:53:10Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.280495 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.292575 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.300991 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b1b19a-ac1a-4884-b500-1a4e5d2ff816\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7081c0c1a2a04736b851b2891cf22d96332e3361d93479f3fae43034a9fff212\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.319490 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e4
9117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.335456 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.351381 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.361954 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.362000 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.362017 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.362041 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.362082 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:11Z","lastTransitionTime":"2025-12-05T05:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.371633 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.382488 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:11 crc kubenswrapper[4742]: E1205 05:53:11.382710 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.383132 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:11 crc kubenswrapper[4742]: E1205 05:53:11.383426 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.383732 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:11 crc kubenswrapper[4742]: E1205 05:53:11.383875 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.384580 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:11 crc kubenswrapper[4742]: E1205 05:53:11.384744 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.387015 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.405844 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.420774 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:11Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.464609 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.464644 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.464656 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.464672 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.464684 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:11Z","lastTransitionTime":"2025-12-05T05:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.567301 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.567349 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.567360 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.567389 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.567401 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:11Z","lastTransitionTime":"2025-12-05T05:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.670845 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.670909 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.670927 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.670951 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.670968 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:11Z","lastTransitionTime":"2025-12-05T05:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.774385 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.774422 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.774432 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.774446 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.774454 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:11Z","lastTransitionTime":"2025-12-05T05:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.877205 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.877245 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.877254 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.877272 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.877282 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:11Z","lastTransitionTime":"2025-12-05T05:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.979188 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.979231 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.979240 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.979257 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:11 crc kubenswrapper[4742]: I1205 05:53:11.979266 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:11Z","lastTransitionTime":"2025-12-05T05:53:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.081314 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.081359 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.081375 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.081396 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.081410 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:12Z","lastTransitionTime":"2025-12-05T05:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.112997 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-776bt_39641a18-5d13-441f-9956-3777b9f27703/kube-multus/0.log" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.113390 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-776bt" event={"ID":"39641a18-5d13-441f-9956-3777b9f27703","Type":"ContainerStarted","Data":"c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422"} Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.131449 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.152430 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:10Z\\\",\\\"message\\\":\\\"2025-12-05T05:52:24+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5\\\\n2025-12-05T05:52:24+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5 to /host/opt/cni/bin/\\\\n2025-12-05T05:52:25Z [verbose] multus-daemon started\\\\n2025-12-05T05:52:25Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:53:10Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:53:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.166734 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status 
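The multus termination message above ("still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition") describes a poll-until-deadline loop on a readiness-indicator file. A stdlib-only sketch of that pattern, assuming the path from the log; the one-second interval and 45-second timeout are illustrative, not multus's actual settings.

package main

import (
	"errors"
	"fmt"
	"os"
	"time"
)

// waitForReadinessIndicator polls for the indicator file until it
// appears or the timeout elapses, matching the behavior the log
// message describes.
func waitForReadinessIndicator(path string, interval, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for {
		if _, err := os.Stat(path); err == nil {
			return nil // indicator file exists; default network is ready
		}
		if time.Now().After(deadline) {
			return errors.New("timed out waiting for the condition")
		}
		time.Sleep(interval)
	}
}

func main() {
	err := waitForReadinessIndicator("/host/run/multus/cni/net.d/10-ovn-kubernetes.conf",
		time.Second, 45*time.Second)
	fmt.Println(err)
}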
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.178862 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 
05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.184501 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.184562 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.184585 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.184615 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.184636 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:12Z","lastTransitionTime":"2025-12-05T05:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.190029 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.202262 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
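Every status patch in this stretch fails identically: the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a certificate that expired on 2025-08-24T17:21:41Z, well before the current time in the log. An illustrative Go diagnostic (not part of any OpenShift tooling) that dials the endpoint from the log and reports the presented certificate's NotAfter; verification is deliberately skipped so an already-expired chain can still be inspected.

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// InsecureSkipVerify lets us read the expired certificate instead
	// of failing the handshake the way the kubelet's webhook call does.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()
	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject=%s notAfter=%s expired=%v\n",
		cert.Subject, cert.NotAfter.Format(time.RFC3339), time.Now().After(cert.NotAfter))
}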
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b1b19a-ac1a-4884-b500-1a4e5d2ff816\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7081c0c1a2a04736b851b2891cf22d96332e3361d93479f3fae43034a9fff212\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.220556 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469a
b57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.235939 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d336
70d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.247129 4742 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.257880 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.269540 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.282111 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z"
Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.286598 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.286622 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.286631 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.286642 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.286650 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:12Z","lastTransitionTime":"2025-12-05T05:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.294511 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.306131 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.317079 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6fb3a766-b56c-4b6c-a09e-3666df4accb8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f54b5f75e10a5fd9f43eec7433614b23bef72beb32ff9028155852c09d9b2e7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c03a8d713a4b0a06c47eb28b20328d66f0f3475b56a16ded6f429dd6648e13a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13e953bfbd6033682f3959815eedf4f814d275be3391564618723c3491faee3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.332786 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.343534 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.357153 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.385791 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:52Z\\\",\\\"message\\\":\\\"94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433832 6371 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433778 6371 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433851 6371 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-ttdt8 in node crc\\\\nI1205 05:52:52.433864 6371 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-ttdt8 after 0 failed attempt(s)\\\\nI1205 05:52:52.433890 6371 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433835 6371 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/mac\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:12Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.388724 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.388826 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.388892 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.388955 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.389023 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:12Z","lastTransitionTime":"2025-12-05T05:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.490724 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.490958 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.491035 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.491138 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.491220 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:12Z","lastTransitionTime":"2025-12-05T05:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.593379 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.593411 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.593419 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.593434 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.593443 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:12Z","lastTransitionTime":"2025-12-05T05:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.696144 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.696213 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.696237 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.696262 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.696279 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:12Z","lastTransitionTime":"2025-12-05T05:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.798692 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.799139 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.799311 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.799848 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.800004 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:12Z","lastTransitionTime":"2025-12-05T05:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.902673 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.902711 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.902724 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.902739 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:12 crc kubenswrapper[4742]: I1205 05:53:12.902750 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:12Z","lastTransitionTime":"2025-12-05T05:53:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.009728 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.010072 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.010187 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.010336 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.010450 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:13Z","lastTransitionTime":"2025-12-05T05:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.113163 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.113902 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.114045 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.114214 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.114311 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:13Z","lastTransitionTime":"2025-12-05T05:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.217325 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.217376 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.217394 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.217417 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.217435 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:13Z","lastTransitionTime":"2025-12-05T05:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.319682 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.319739 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.319753 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.319771 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.319784 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:13Z","lastTransitionTime":"2025-12-05T05:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.382357 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.382407 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:13 crc kubenswrapper[4742]: E1205 05:53:13.382470 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:13 crc kubenswrapper[4742]: E1205 05:53:13.382654 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.382738 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:13 crc kubenswrapper[4742]: E1205 05:53:13.383039 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.382877 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:13 crc kubenswrapper[4742]: E1205 05:53:13.383226 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.422861 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.422903 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.422916 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.422934 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.422946 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:13Z","lastTransitionTime":"2025-12-05T05:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.525218 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.525271 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.525284 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.525301 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.525313 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:13Z","lastTransitionTime":"2025-12-05T05:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.629002 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.629112 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.629132 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.629158 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.629177 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:13Z","lastTransitionTime":"2025-12-05T05:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.732182 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.732221 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.732232 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.732247 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.732258 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:13Z","lastTransitionTime":"2025-12-05T05:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.834723 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.834755 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.834766 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.834781 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.834810 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:13Z","lastTransitionTime":"2025-12-05T05:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.937426 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.937464 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.937473 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.937488 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:13 crc kubenswrapper[4742]: I1205 05:53:13.937501 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:13Z","lastTransitionTime":"2025-12-05T05:53:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.039421 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.039470 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.039484 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.039500 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.039512 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:14Z","lastTransitionTime":"2025-12-05T05:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.141814 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.141868 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.141886 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.141908 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.141925 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:14Z","lastTransitionTime":"2025-12-05T05:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.244628 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.244696 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.244720 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.244749 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.244771 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:14Z","lastTransitionTime":"2025-12-05T05:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.347812 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.347848 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.347859 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.347875 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.347886 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:14Z","lastTransitionTime":"2025-12-05T05:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.399914 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b1b19a-ac1a-4884-b500-1a4e5d2ff816\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7081c0c1a2a04736b851b2891cf22d96332e3361d93479f3fae43034a9fff212\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.428266 4742 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:
52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.446320 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.450282 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.450324 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.450335 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.450349 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.450358 4742 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:14Z","lastTransitionTime":"2025-12-05T05:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.458708 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.471447 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.489552 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.507478 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.521668 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.530883 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.543608 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6fb3a766-b56c-4b6c-a09e-3666df4accb8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f54b5f75e10a5fd9f43eec7433614b23bef72beb32ff9028155852c09d9b2e7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c03a8d713a4b0a06c47eb28b20328d66f0f3475b56a16ded6f429dd6648e13a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13e953bfbd6033682f3959815eedf4f814d275be3391564618723c3491faee3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.552842 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.552879 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.552890 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.552906 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.552917 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:14Z","lastTransitionTime":"2025-12-05T05:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.557387 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.567319 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.586263 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.608302 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:52Z\\\",\\\"message\\\":\\\"94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433832 6371 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433778 6371 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433851 6371 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-ttdt8 in node crc\\\\nI1205 05:52:52.433864 6371 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-ttdt8 after 0 failed attempt(s)\\\\nI1205 05:52:52.433890 6371 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433835 6371 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/mac\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.617320 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.632640 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:10Z\\\",\\\"message\\\":\\\"2025-12-05T05:52:24+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5\\\\n2025-12-05T05:52:24+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5 to /host/opt/cni/bin/\\\\n2025-12-05T05:52:25Z [verbose] multus-daemon started\\\\n2025-12-05T05:52:25Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:53:10Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:53:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.641465 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.650564 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 
05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.655461 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.655511 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.655525 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.655543 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.655555 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:14Z","lastTransitionTime":"2025-12-05T05:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.662919 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:14Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.758725 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.758783 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.758799 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.758829 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.758847 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:14Z","lastTransitionTime":"2025-12-05T05:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
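
The repeated KubeletNotReady condition comes from the container runtime's network-readiness probe, which reports NetworkReady=false while no CNI network configuration exists in /etc/kubernetes/cni/net.d/. A rough sketch of that kind of directory check (illustrative, not the CRI implementation; libcni accepts .conf, .conflist, and .json files):

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    // hasCNIConfig reports whether dir contains at least one CNI network
    // configuration file, the condition the NetworkReady probe is gating on.
    func hasCNIConfig(dir string) (bool, error) {
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		return false, err
    	}
    	for _, e := range entries {
    		if e.IsDir() {
    			continue
    		}
    		switch filepath.Ext(e.Name()) {
    		case ".conf", ".conflist", ".json":
    			return true, nil
    		}
    	}
    	return false, nil
    }

    func main() {
    	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
    	if err != nil {
    		fmt.Println("cannot read CNI config dir:", err)
    		return
    	}
    	if !ok {
    		fmt.Println("NetworkReady=false: no CNI configuration file found")
    		return
    	}
    	fmt.Println("NetworkReady=true")
    }
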
Has your network provider started?"} Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.860799 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.860832 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.860841 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.860853 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.860861 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:14Z","lastTransitionTime":"2025-12-05T05:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.963160 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.963212 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.963223 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.963242 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:14 crc kubenswrapper[4742]: I1205 05:53:14.963254 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:14Z","lastTransitionTime":"2025-12-05T05:53:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.065352 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.065380 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.065389 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.065401 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.065410 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:15Z","lastTransitionTime":"2025-12-05T05:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
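
The condition={...} payload logged by setters.go is a plain NodeCondition object. A minimal struct that reproduces that JSON shape (field set taken from the log line above, not imported from k8s.io/api):

    package main

    import (
    	"encoding/json"
    	"fmt"
    	"time"
    )

    // NodeCondition mirrors the JSON shape of the "Node became not ready"
    // condition recorded above.
    type NodeCondition struct {
    	Type               string    `json:"type"`
    	Status             string    `json:"status"`
    	LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
    	LastTransitionTime time.Time `json:"lastTransitionTime"`
    	Reason             string    `json:"reason"`
    	Message            string    `json:"message"`
    }

    func main() {
    	now := time.Now().UTC().Truncate(time.Second)
    	c := NodeCondition{
    		Type:               "Ready",
    		Status:             "False",
    		LastHeartbeatTime:  now,
    		LastTransitionTime: now,
    		Reason:             "KubeletNotReady",
    		Message:            "container runtime network not ready: NetworkReady=false",
    	}
    	out, _ := json.Marshal(c)
    	fmt.Println(string(out))
    }
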
Has your network provider started?"} Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.167749 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.167794 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.167802 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.167816 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.167826 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:15Z","lastTransitionTime":"2025-12-05T05:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.270460 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.270549 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.270583 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.270612 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.270632 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:15Z","lastTransitionTime":"2025-12-05T05:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.373210 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.373251 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.373264 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.373280 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.373293 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:15Z","lastTransitionTime":"2025-12-05T05:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.382240 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.382281 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.382340 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.382362 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:15 crc kubenswrapper[4742]: E1205 05:53:15.382484 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:15 crc kubenswrapper[4742]: E1205 05:53:15.382711 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:15 crc kubenswrapper[4742]: E1205 05:53:15.382805 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:15 crc kubenswrapper[4742]: E1205 05:53:15.382876 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
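
The pod_workers errors above show which workloads are actually blocked: only pods that need a new pod-network sandbox are skipped while NetworkReady=false; host-network pods keep syncing. A toy version of that gate (illustrative only, not kubelet code):

    package main

    import "fmt"

    // canStartSandbox is a toy version of the check behind "Error syncing
    // pod, skipping": a new pod sandbox needs the cluster network unless
    // the pod runs in the host network namespace.
    func canStartSandbox(networkReady, hostNetwork bool) error {
    	if networkReady || hostNetwork {
    		return nil
    	}
    	return fmt.Errorf("network is not ready: NetworkReady=false")
    }

    func main() {
    	fmt.Println(canStartSandbox(false, false)) // skipped, like the pods above
    	fmt.Println(canStartSandbox(false, true))  // host-network pods proceed
    }
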
pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.475860 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.475900 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.475912 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.475928 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.475940 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:15Z","lastTransitionTime":"2025-12-05T05:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.578789 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.578829 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.578840 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.578855 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.578865 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:15Z","lastTransitionTime":"2025-12-05T05:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.680663 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.680887 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.680977 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.681104 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.681196 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:15Z","lastTransitionTime":"2025-12-05T05:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.783762 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.783816 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.783832 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.783856 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.783872 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:15Z","lastTransitionTime":"2025-12-05T05:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.886471 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.886747 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.886848 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.886948 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.887039 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:15Z","lastTransitionTime":"2025-12-05T05:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.990104 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.990155 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.990166 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.990185 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:15 crc kubenswrapper[4742]: I1205 05:53:15.990197 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:15Z","lastTransitionTime":"2025-12-05T05:53:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.093165 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.093488 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.093632 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.093764 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.093888 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:16Z","lastTransitionTime":"2025-12-05T05:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.197579 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.197669 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.197696 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.198120 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.198164 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:16Z","lastTransitionTime":"2025-12-05T05:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.301011 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.301082 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.301098 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.301119 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.301133 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:16Z","lastTransitionTime":"2025-12-05T05:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.403598 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.403652 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.403664 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.403683 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.403694 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:16Z","lastTransitionTime":"2025-12-05T05:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.507636 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.507665 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.507674 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.507689 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.507698 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:16Z","lastTransitionTime":"2025-12-05T05:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.609725 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.609790 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.609807 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.609834 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.609851 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:16Z","lastTransitionTime":"2025-12-05T05:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.712307 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.712691 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.712894 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.713134 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.713326 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:16Z","lastTransitionTime":"2025-12-05T05:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.815372 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.815438 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.815456 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.815481 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.815499 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:16Z","lastTransitionTime":"2025-12-05T05:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.918240 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.918291 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.918313 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.918343 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:16 crc kubenswrapper[4742]: I1205 05:53:16.918365 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:16Z","lastTransitionTime":"2025-12-05T05:53:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.020011 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.020318 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.020415 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.020514 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.020617 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:17Z","lastTransitionTime":"2025-12-05T05:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.124288 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.124359 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.124383 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.124413 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.124446 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:17Z","lastTransitionTime":"2025-12-05T05:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.227759 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.227810 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.227820 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.227844 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.227862 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:17Z","lastTransitionTime":"2025-12-05T05:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.330638 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.330698 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.330715 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.330739 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.330756 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:17Z","lastTransitionTime":"2025-12-05T05:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.382410 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.382587 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.382705 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:17 crc kubenswrapper[4742]: E1205 05:53:17.382717 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.382820 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:17 crc kubenswrapper[4742]: E1205 05:53:17.382933 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:17 crc kubenswrapper[4742]: E1205 05:53:17.383029 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:17 crc kubenswrapper[4742]: E1205 05:53:17.383163 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.433308 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.433355 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.433373 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.433395 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.433412 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:17Z","lastTransitionTime":"2025-12-05T05:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.460879 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.460927 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.460944 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.460964 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.460979 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:17Z","lastTransitionTime":"2025-12-05T05:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:17 crc kubenswrapper[4742]: E1205 05:53:17.481655 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:17Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.486763 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.486802 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.486818 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.486838 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.486857 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:17Z","lastTransitionTime":"2025-12-05T05:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:17 crc kubenswrapper[4742]: E1205 05:53:17.509026 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:17Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.514518 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.514576 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.514595 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.514619 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.514637 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:17Z","lastTransitionTime":"2025-12-05T05:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:17 crc kubenswrapper[4742]: E1205 05:53:17.536301 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:17Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.541407 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.541496 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.541524 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.541557 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.541581 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:17Z","lastTransitionTime":"2025-12-05T05:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:17 crc kubenswrapper[4742]: E1205 05:53:17.561364 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:17Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.566246 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.566307 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.566325 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.566350 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.566371 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:17Z","lastTransitionTime":"2025-12-05T05:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:17 crc kubenswrapper[4742]: E1205 05:53:17.586084 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:17Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:17 crc kubenswrapper[4742]: E1205 05:53:17.586318 4742 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.588532 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
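The retries above all fail for the same reason: the serving certificate of the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-12-05. A minimal Go sketch to confirm the validity window independently of the kubelet (assumptions: it runs on the node itself, the address is taken verbatim from the log above, and InsecureSkipVerify is set deliberately so the handshake survives long enough to read the expired certificate):

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Address of the network-node-identity webhook, taken from the log above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // inspect the certificate instead of rejecting it
	})
	if err != nil {
		log.Fatalf("dial webhook: %v", err)
	}
	defer conn.Close()

	// Print each presented certificate's validity window; per the log,
	// notAfter should read 2025-08-24T17:21:41Z, months behind node time.
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%v notBefore=%s notAfter=%s expired=%t\n",
			cert.Subject,
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			time.Now().After(cert.NotAfter))
	}
}

Until that certificate is rotated, every node-status patch the API server forwards through this webhook fails during the TLS handshake, which is why the kubelet retries and finally gives up with "update node status exceeds retry count".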
event="NodeHasSufficientMemory" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.588585 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.588605 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.588632 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.588652 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:17Z","lastTransitionTime":"2025-12-05T05:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.692330 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.692410 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.692432 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.692462 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.692484 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:17Z","lastTransitionTime":"2025-12-05T05:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.795929 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.796013 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.796038 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.796106 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.796129 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:17Z","lastTransitionTime":"2025-12-05T05:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.899662 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.899731 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.899754 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.899824 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:17 crc kubenswrapper[4742]: I1205 05:53:17.899842 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:17Z","lastTransitionTime":"2025-12-05T05:53:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.003405 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.003497 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.003522 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.003553 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.003575 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:18Z","lastTransitionTime":"2025-12-05T05:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.106920 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.106980 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.106998 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.107022 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.107041 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:18Z","lastTransitionTime":"2025-12-05T05:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.215588 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.215634 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.215651 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.215673 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.215690 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:18Z","lastTransitionTime":"2025-12-05T05:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.318378 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.318449 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.318473 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.318503 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.318526 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:18Z","lastTransitionTime":"2025-12-05T05:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.383839 4742 scope.go:117] "RemoveContainer" containerID="40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.421684 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.421797 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.421817 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.421840 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.421856 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:18Z","lastTransitionTime":"2025-12-05T05:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.525930 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.526320 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.526510 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.526689 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.526910 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:18Z","lastTransitionTime":"2025-12-05T05:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.630893 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.630952 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.630969 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.630993 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.631011 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:18Z","lastTransitionTime":"2025-12-05T05:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.734139 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.734286 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.734306 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.734329 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.734378 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:18Z","lastTransitionTime":"2025-12-05T05:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.837165 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.837212 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.837230 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.837256 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.837272 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:18Z","lastTransitionTime":"2025-12-05T05:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.940413 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.940460 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.940472 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.940490 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:18 crc kubenswrapper[4742]: I1205 05:53:18.940504 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:18Z","lastTransitionTime":"2025-12-05T05:53:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.044000 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.044039 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.044050 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.044089 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.044105 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:19Z","lastTransitionTime":"2025-12-05T05:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.135553 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/2.log" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.138427 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerStarted","Data":"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251"} Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.139018 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.146551 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.146590 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.146605 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.146624 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.146639 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:19Z","lastTransitionTime":"2025-12-05T05:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.154854 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b1b19a-ac1a-4884-b500-1a4e5d2ff816\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7081c0c1a2a04736b851b2891cf22d96332e3361d93479f3fae43034a9fff212\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.182440 4742 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:
52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.205475 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.226437 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.238271 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.248523 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.248559 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.248571 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.248592 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.248607 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:19Z","lastTransitionTime":"2025-12-05T05:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.257717 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.276970 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.292117 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.307530 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.319879 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6fb3a766-b56c-4b6c-a09e-3666df4accb8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f54b5f75e10a5fd9f43eec7433614b23bef72beb32ff9028155852c09d9b2e7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c03a8d713a4b0a06c47eb28b20328d66f0f3475b56a16ded6f429dd6648e13a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13e953bfbd6033682f3959815eedf4f814d275be3391564618723c3491faee3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.331362 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.343554 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.351047 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.351116 4742 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.351129 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.351146 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.351157 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:19Z","lastTransitionTime":"2025-12-05T05:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.355023 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"t
erminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://
3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPat
h\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.369550 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8f102704dc40fbeacb3f714997407b1d96dac76
3889a146daea6f83b360e251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:52Z\\\",\\\"message\\\":\\\"94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433832 6371 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433778 6371 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433851 6371 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-ttdt8 in node crc\\\\nI1205 05:52:52.433864 6371 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-ttdt8 after 0 failed attempt(s)\\\\nI1205 05:52:52.433890 6371 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433835 6371 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} 
name:Service_openshift-machine-api/mac\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:53:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\
"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.378557 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.382067 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.382106 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.382180 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.382236 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:19 crc kubenswrapper[4742]: E1205 05:53:19.382323 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:19 crc kubenswrapper[4742]: E1205 05:53:19.382410 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:19 crc kubenswrapper[4742]: E1205 05:53:19.382530 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:19 crc kubenswrapper[4742]: E1205 05:53:19.382597 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.387654 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:10Z\\\",\\\"message\\\":\\\"2025-12-05T05:52:24+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5\\\\n2025-12-05T05:52:24+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5 to /host/opt/cni/bin/\\\\n2025-12-05T05:52:25Z [verbose] multus-daemon started\\\\n2025-12-05T05:52:25Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:53:10Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:53:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.397730 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.408644 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 
05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.420256 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:19Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.455436 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.455480 4742 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.455493 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.455508 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.455523 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:19Z","lastTransitionTime":"2025-12-05T05:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.558209 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.558289 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.558302 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.558324 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.558338 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:19Z","lastTransitionTime":"2025-12-05T05:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.660611 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.660673 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.660688 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.660706 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.660718 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:19Z","lastTransitionTime":"2025-12-05T05:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.763817 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.763897 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.763919 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.763945 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.763963 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:19Z","lastTransitionTime":"2025-12-05T05:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.866518 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.866581 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.866600 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.866627 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.866646 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:19Z","lastTransitionTime":"2025-12-05T05:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.971012 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.971119 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.971133 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.971157 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:19 crc kubenswrapper[4742]: I1205 05:53:19.971174 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:19Z","lastTransitionTime":"2025-12-05T05:53:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.073844 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.073918 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.073938 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.073980 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.073999 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:20Z","lastTransitionTime":"2025-12-05T05:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.144834 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/3.log" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.146136 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/2.log" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.156626 4742 generic.go:334] "Generic (PLEG): container finished" podID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerID="d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251" exitCode=1 Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.156697 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerDied","Data":"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251"} Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.156762 4742 scope.go:117] "RemoveContainer" containerID="40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.157512 4742 scope.go:117] "RemoveContainer" containerID="d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251" Dec 05 05:53:20 crc kubenswrapper[4742]: E1205 05:53:20.157715 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\"" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.176596 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.176674 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.176695 4742 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.176722 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.176741 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:20Z","lastTransitionTime":"2025-12-05T05:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.179979 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-conf
ig\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.197349 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.212433 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.231609 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:10Z\\\",\\\"message\\\":\\\"2025-12-05T05:52:24+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5\\\\n2025-12-05T05:52:24+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5 to /host/opt/cni/bin/\\\\n2025-12-05T05:52:25Z [verbose] multus-daemon started\\\\n2025-12-05T05:52:25Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:53:10Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:53:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.247891 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.264415 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.279497 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.279530 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.279545 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.279565 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.279589 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:20Z","lastTransitionTime":"2025-12-05T05:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.281847 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b1b19a-ac1a-4884-b500-1a4e5d2ff816\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7081c0c1a2a04736b851b2891cf22d96332e3361d93479f3fae43034a9fff212\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.316286 4742 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:
52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.338386 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.354425 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.371720 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.385917 4742 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.385964 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.385980 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.385998 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.386012 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:20Z","lastTransitionTime":"2025-12-05T05:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.405948 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.432293 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.452508 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.465352 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.480768 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40915a8bc31b8415ebbb1fdfb58ad53dcbae69ac9737c949eabfcd821200ceb5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:52:52Z\\\",\\\"message\\\":\\\"94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433832 6371 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433778 6371 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 05:52:52.433851 6371 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-ttdt8 in node crc\\\\nI1205 05:52:52.433864 6371 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-ttdt8 after 0 failed attempt(s)\\\\nI1205 05:52:52.433890 6371 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-ttdt8\\\\nI1205 05:52:52.433835 6371 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-machine-api/machine-api-operator-machine-webhook]} name:Service_openshift-machine-api/mac\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:51Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:19Z\\\",\\\"message\\\":\\\"1.EgressIP event handler 8\\\\nI1205 05:53:19.555770 6722 
handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 05:53:19.555814 6722 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 05:53:19.555883 6722 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 05:53:19.555890 6722 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 05:53:19.555896 6722 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 05:53:19.555919 6722 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 05:53:19.555919 6722 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 05:53:19.555929 6722 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 05:53:19.555946 6722 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 05:53:19.555940 6722 factory.go:656] Stopping watch factory\\\\nI1205 05:53:19.555956 6722 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 05:53:19.556100 6722 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1205 05:53:19.556186 6722 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1205 05:53:19.556222 6722 ovnkube.go:599] Stopped ovnkube\\\\nI1205 05:53:19.556245 6722 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 05:53:19.556306 6722 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:53:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.488122 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.488161 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.488172 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.488189 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.488200 4742 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:20Z","lastTransitionTime":"2025-12-05T05:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.493762 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6fb3a766-b56c-4b6c-a09e-3666df4accb8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f54b5f75e10a5fd9f43eec7433614b23bef72beb32ff9028155852c09d9b2e7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c03a8d713a4b0a06c47eb28b20328d66f0f3475b56a16ded6f429dd6648e13a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13e953bfbd6033682f3959815eedf4f814d275be3391564618723c3491faee3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recover
y-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.506642 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.520323 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:20Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.591336 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.591407 4742 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.591421 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.591439 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.591450 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:20Z","lastTransitionTime":"2025-12-05T05:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.694735 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.694794 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.694804 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.694821 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.694832 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:20Z","lastTransitionTime":"2025-12-05T05:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.797465 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.797526 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.797536 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.797550 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.797558 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:20Z","lastTransitionTime":"2025-12-05T05:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.899892 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.899973 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.899999 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.900029 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:20 crc kubenswrapper[4742]: I1205 05:53:20.900051 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:20Z","lastTransitionTime":"2025-12-05T05:53:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.002717 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.002787 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.002811 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.002834 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.002851 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:21Z","lastTransitionTime":"2025-12-05T05:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.106320 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.106382 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.106403 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.106430 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.106453 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:21Z","lastTransitionTime":"2025-12-05T05:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.163898 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/3.log" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.169563 4742 scope.go:117] "RemoveContainer" containerID="d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251" Dec 05 05:53:21 crc kubenswrapper[4742]: E1205 05:53:21.169867 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\"" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.194484 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"term
inated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8
ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.209431 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.209507 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.209532 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.209563 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.209586 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:21Z","lastTransitionTime":"2025-12-05T05:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.227358 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:19Z\\\",\\\"message\\\":\\\"1.EgressIP event handler 8\\\\nI1205 05:53:19.555770 6722 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 05:53:19.555814 6722 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 05:53:19.555883 6722 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 05:53:19.555890 6722 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 05:53:19.555896 6722 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 05:53:19.555919 6722 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 05:53:19.555919 6722 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 05:53:19.555929 6722 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 05:53:19.555946 6722 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 05:53:19.555940 6722 factory.go:656] Stopping watch factory\\\\nI1205 05:53:19.555956 6722 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 05:53:19.556100 6722 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1205 05:53:19.556186 6722 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1205 05:53:19.556222 6722 ovnkube.go:599] Stopped ovnkube\\\\nI1205 05:53:19.556245 6722 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 05:53:19.556306 6722 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:53:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.247915 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6fb3a766-b56c-4b6c-a09e-3666df4accb8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f54b5f75e10a5fd9f43eec7433614b23bef72beb32ff9028155852c09d9b2e7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c03a8d713a4b0a06c47eb28b20328d66f0f3475b56a16ded6f429dd6648e13a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13e953bfbd6033682f3959815eedf4f814d275be3391564618723c3491faee3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.268100 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.288694 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.306568 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 
05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.312321 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.312394 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.312414 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.312441 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.312460 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:21Z","lastTransitionTime":"2025-12-05T05:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.324682 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.341544 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.361916 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:10Z\\\",\\\"message\\\":\\\"2025-12-05T05:52:24+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5\\\\n2025-12-05T05:52:24+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5 to /host/opt/cni/bin/\\\\n2025-12-05T05:52:25Z [verbose] multus-daemon started\\\\n2025-12-05T05:52:25Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:53:10Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:53:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.378400 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.381755 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.381773 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:21 crc kubenswrapper[4742]: E1205 05:53:21.381919 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.382024 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:21 crc kubenswrapper[4742]: E1205 05:53:21.382044 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.381895 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:21 crc kubenswrapper[4742]: E1205 05:53:21.382584 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:21 crc kubenswrapper[4742]: E1205 05:53:21.382784 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.400257 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.414786 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.414827 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.414841 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.414862 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.414877 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:21Z","lastTransitionTime":"2025-12-05T05:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.416347 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b1b19a-ac1a-4884-b500-1a4e5d2ff816\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7081c0c1a2a04736b851b2891cf22d96332e3361d93479f3fae43034a9fff212\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.447035 4742 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:
52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.468269 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.481758 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.494511 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.511888 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d5214
2d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.517563 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.517626 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.517644 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.517674 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.517739 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:21Z","lastTransitionTime":"2025-12-05T05:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.527707 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.542544 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:21Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.619841 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.619893 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.619910 4742 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.619933 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.619950 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:21Z","lastTransitionTime":"2025-12-05T05:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.722563 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.722630 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.722648 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.722671 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.722689 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:21Z","lastTransitionTime":"2025-12-05T05:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.825865 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.825922 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.825940 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.825964 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.825982 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:21Z","lastTransitionTime":"2025-12-05T05:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.929732 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.929794 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.929817 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.929844 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:21 crc kubenswrapper[4742]: I1205 05:53:21.929866 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:21Z","lastTransitionTime":"2025-12-05T05:53:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.032822 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.032883 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.032896 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.032919 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.032935 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:22Z","lastTransitionTime":"2025-12-05T05:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.142492 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.142549 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.142565 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.142587 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.142603 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:22Z","lastTransitionTime":"2025-12-05T05:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.245585 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.245625 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.245633 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.245647 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.245657 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:22Z","lastTransitionTime":"2025-12-05T05:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.348734 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.348783 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.348816 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.348838 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.348855 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:22Z","lastTransitionTime":"2025-12-05T05:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.451624 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.451661 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.451671 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.451685 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.451694 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:22Z","lastTransitionTime":"2025-12-05T05:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.554645 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.554726 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.554753 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.554784 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.554802 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:22Z","lastTransitionTime":"2025-12-05T05:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.657826 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.657875 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.657891 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.657914 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.657930 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:22Z","lastTransitionTime":"2025-12-05T05:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.760943 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.761021 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.761044 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.761110 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.761135 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:22Z","lastTransitionTime":"2025-12-05T05:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.864903 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.864947 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.864957 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.864975 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.864987 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:22Z","lastTransitionTime":"2025-12-05T05:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.968003 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.968105 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.968126 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.968174 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:22 crc kubenswrapper[4742]: I1205 05:53:22.968196 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:22Z","lastTransitionTime":"2025-12-05T05:53:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.072289 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.072351 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.072370 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.072393 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.072413 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:23Z","lastTransitionTime":"2025-12-05T05:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.174776 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.174856 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.174881 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.174911 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.174934 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:23Z","lastTransitionTime":"2025-12-05T05:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.278095 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.278452 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.278476 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.278506 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.278534 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:23Z","lastTransitionTime":"2025-12-05T05:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.381727 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:23 crc kubenswrapper[4742]: E1205 05:53:23.381846 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.381867 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.381915 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.381933 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.381957 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.381974 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:23Z","lastTransitionTime":"2025-12-05T05:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.382023 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:23 crc kubenswrapper[4742]: E1205 05:53:23.382101 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.382147 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:23 crc kubenswrapper[4742]: E1205 05:53:23.382199 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.382239 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:23 crc kubenswrapper[4742]: E1205 05:53:23.382288 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.484393 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.484439 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.484450 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.484468 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.484479 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:23Z","lastTransitionTime":"2025-12-05T05:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.586547 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.586587 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.586602 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.586620 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.586635 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:23Z","lastTransitionTime":"2025-12-05T05:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.690460 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.690526 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.690544 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.690569 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.690590 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:23Z","lastTransitionTime":"2025-12-05T05:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.793615 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.793670 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.793687 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.793709 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.793725 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:23Z","lastTransitionTime":"2025-12-05T05:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.897170 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.897224 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.897245 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.897274 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:23 crc kubenswrapper[4742]: I1205 05:53:23.897300 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:23Z","lastTransitionTime":"2025-12-05T05:53:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:23.999944 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.000025 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.000047 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.000133 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.000161 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:24Z","lastTransitionTime":"2025-12-05T05:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.102548 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.102593 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.102606 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.102623 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.102636 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:24Z","lastTransitionTime":"2025-12-05T05:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.205145 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.205229 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.205241 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.205259 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.205273 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:24Z","lastTransitionTime":"2025-12-05T05:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.312010 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.312120 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.312142 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.312170 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.312189 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:24Z","lastTransitionTime":"2025-12-05T05:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.400251 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447
235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\
\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"f
inishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.415818 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.415884 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.415907 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.415936 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.415958 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:24Z","lastTransitionTime":"2025-12-05T05:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.422957 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:19Z\\\",\\\"message\\\":\\\"1.EgressIP event handler 8\\\\nI1205 05:53:19.555770 6722 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 05:53:19.555814 6722 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 05:53:19.555883 6722 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 05:53:19.555890 6722 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 05:53:19.555896 6722 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 05:53:19.555919 6722 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 05:53:19.555919 6722 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 05:53:19.555929 6722 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 05:53:19.555946 6722 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 05:53:19.555940 6722 factory.go:656] Stopping watch factory\\\\nI1205 05:53:19.555956 6722 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 05:53:19.556100 6722 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1205 05:53:19.556186 6722 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1205 05:53:19.556222 6722 ovnkube.go:599] Stopped ovnkube\\\\nI1205 05:53:19.556245 6722 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 05:53:19.556306 6722 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:53:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.436307 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6fb3a766-b56c-4b6c-a09e-3666df4accb8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f54b5f75e10a5fd9f43eec7433614b23bef72beb32ff9028155852c09d9b2e7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c03a8d713a4b0a06c47eb28b20328d66f0f3475b56a16ded6f429dd6648e13a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13e953bfbd6033682f3959815eedf4f814d275be3391564618723c3491faee3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.449634 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.464279 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.478720 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 
05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.490980 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.502378 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.515141 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:10Z\\\",\\\"message\\\":\\\"2025-12-05T05:52:24+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5\\\\n2025-12-05T05:52:24+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5 to /host/opt/cni/bin/\\\\n2025-12-05T05:52:25Z [verbose] multus-daemon started\\\\n2025-12-05T05:52:25Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:53:10Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:53:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.519032 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.519092 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.519108 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.519130 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.519145 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:24Z","lastTransitionTime":"2025-12-05T05:53:24Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.526971 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.540743 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.551330 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b1b19a-ac1a-4884-b500-1a4e5d2ff816\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7081c0c1a2a04736b851b2891cf22d96332e3361d93479f3fae43034a9fff212\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.578507 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469a
b57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.591334 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d336
70d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.608709 4742 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.621766 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.621822 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.621840 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.621863 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.621884 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:24Z","lastTransitionTime":"2025-12-05T05:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.623782 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.637214 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.653968 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.671108 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:24Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.724406 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.724464 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.724479 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.724501 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.724517 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:24Z","lastTransitionTime":"2025-12-05T05:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.827321 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.827372 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.827385 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.827407 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:24 crc kubenswrapper[4742]: I1205 05:53:24.827423 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:24Z","lastTransitionTime":"2025-12-05T05:53:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.239247 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.239293 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.239305 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.239321 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.239334 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:25Z","lastTransitionTime":"2025-12-05T05:53:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.341323 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.341410 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.341429 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.341453 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.341470 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:25Z","lastTransitionTime":"2025-12-05T05:53:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.382430 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:25 crc kubenswrapper[4742]: E1205 05:53:25.382596 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.382867 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:25 crc kubenswrapper[4742]: E1205 05:53:25.382966 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.383012 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:25 crc kubenswrapper[4742]: E1205 05:53:25.383190 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.383533 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:25 crc kubenswrapper[4742]: E1205 05:53:25.383894 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.444100 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.444180 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.444199 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.444224 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:25 crc kubenswrapper[4742]: I1205 05:53:25.444242 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:25Z","lastTransitionTime":"2025-12-05T05:53:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.784845 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.784895 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.784911 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.784933 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.784951 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:26Z","lastTransitionTime":"2025-12-05T05:53:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.888525 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.888588 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.888608 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.888630 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.888647 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:26Z","lastTransitionTime":"2025-12-05T05:53:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.991306 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.991372 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.991389 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.991415 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:26 crc kubenswrapper[4742]: I1205 05:53:26.991435 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:26Z","lastTransitionTime":"2025-12-05T05:53:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.094048 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.094102 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.094114 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.094128 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.094139 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:27Z","lastTransitionTime":"2025-12-05T05:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.196199 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.196260 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.196284 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.196312 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.196342 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:27Z","lastTransitionTime":"2025-12-05T05:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.237847 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.238010 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:31.237978777 +0000 UTC m=+147.150113869 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.298964 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.299006 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.299016 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.299031 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.299042 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:27Z","lastTransitionTime":"2025-12-05T05:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.338903 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.338983 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.339132 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.339220 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.339354 4742 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.339432 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:54:31.339406853 +0000 UTC m=+147.251541955 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.339553 4742 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.339614 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 05:54:31.339596398 +0000 UTC m=+147.251731490 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.339737 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.339759 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.339780 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.339792 4742 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.339828 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 05:54:31.339815674 +0000 UTC m=+147.251950736 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.339767 4742 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.339847 4742 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.339873 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 05:54:31.339864876 +0000 UTC m=+147.251999938 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.382176 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.382207 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.382181 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.382348 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.382496 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.382679 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.382834 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.383000 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.402088 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.402161 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.402189 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.402232 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.402273 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:27Z","lastTransitionTime":"2025-12-05T05:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.505374 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.505416 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.505426 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.505440 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.505450 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:27Z","lastTransitionTime":"2025-12-05T05:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.609332 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.609725 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.609979 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.610243 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.610438 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:27Z","lastTransitionTime":"2025-12-05T05:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.646983 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.647052 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.647097 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.647121 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.647138 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:27Z","lastTransitionTime":"2025-12-05T05:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.670389 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.675574 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.675622 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.675632 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.675648 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.675659 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:27Z","lastTransitionTime":"2025-12-05T05:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.696363 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.702225 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.702261 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.702274 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.702290 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.702302 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:27Z","lastTransitionTime":"2025-12-05T05:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.725828 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.732690 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.732756 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.732780 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.732808 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.732830 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:27Z","lastTransitionTime":"2025-12-05T05:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.754119 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.759201 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.759252 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.759268 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.759289 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.759306 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:27Z","lastTransitionTime":"2025-12-05T05:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.779505 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:27Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:27 crc kubenswrapper[4742]: E1205 05:53:27.779697 4742 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.781239 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.781272 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.781286 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.781303 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.781316 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:27Z","lastTransitionTime":"2025-12-05T05:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.884138 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.884385 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.884453 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.884525 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.884592 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:27Z","lastTransitionTime":"2025-12-05T05:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.987442 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.987504 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.987527 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.987555 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:27 crc kubenswrapper[4742]: I1205 05:53:27.987578 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:27Z","lastTransitionTime":"2025-12-05T05:53:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.090633 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.090701 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.090725 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.090753 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.090774 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:28Z","lastTransitionTime":"2025-12-05T05:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.193225 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.193296 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.193319 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.193347 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.193367 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:28Z","lastTransitionTime":"2025-12-05T05:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.297004 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.297107 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.297119 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.297146 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.297158 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:28Z","lastTransitionTime":"2025-12-05T05:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.400371 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.400695 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.400824 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.400964 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.401124 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:28Z","lastTransitionTime":"2025-12-05T05:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.504355 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.504435 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.504456 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.504485 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.504504 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:28Z","lastTransitionTime":"2025-12-05T05:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.607640 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.607719 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.607747 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.607777 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.607798 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:28Z","lastTransitionTime":"2025-12-05T05:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.711084 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.711140 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.711158 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.711182 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.711200 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:28Z","lastTransitionTime":"2025-12-05T05:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.814702 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.814788 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.814805 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.814828 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.814844 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:28Z","lastTransitionTime":"2025-12-05T05:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.918130 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.918243 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.918268 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.918296 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:28 crc kubenswrapper[4742]: I1205 05:53:28.918321 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:28Z","lastTransitionTime":"2025-12-05T05:53:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.021431 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.021491 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.021511 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.021544 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.021580 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:29Z","lastTransitionTime":"2025-12-05T05:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.123958 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.124031 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.124048 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.124105 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.124123 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:29Z","lastTransitionTime":"2025-12-05T05:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.226918 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.226980 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.226998 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.227021 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.227038 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:29Z","lastTransitionTime":"2025-12-05T05:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.330675 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.330731 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.330749 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.330770 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.330788 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:29Z","lastTransitionTime":"2025-12-05T05:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.381713 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.381747 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:29 crc kubenswrapper[4742]: E1205 05:53:29.381869 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.381911 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.381729 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:29 crc kubenswrapper[4742]: E1205 05:53:29.382102 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:29 crc kubenswrapper[4742]: E1205 05:53:29.382225 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:29 crc kubenswrapper[4742]: E1205 05:53:29.382507 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.434178 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.434249 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.434275 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.434303 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.434326 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:29Z","lastTransitionTime":"2025-12-05T05:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.537275 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.537341 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.537358 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.537382 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:29 crc kubenswrapper[4742]: I1205 05:53:29.537400 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:29Z","lastTransitionTime":"2025-12-05T05:53:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 05:53:31 crc kubenswrapper[4742]: I1205 05:53:31.381928 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 05:53:31 crc kubenswrapper[4742]: I1205 05:53:31.381977 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 05:53:31 crc kubenswrapper[4742]: I1205 05:53:31.382033 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:53:31 crc kubenswrapper[4742]: I1205 05:53:31.381938 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 05:53:31 crc kubenswrapper[4742]: E1205 05:53:31.382213 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 05:53:31 crc kubenswrapper[4742]: E1205 05:53:31.382347 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 05:53:31 crc kubenswrapper[4742]: E1205 05:53:31.382454 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 05:53:31 crc kubenswrapper[4742]: E1205 05:53:31.382661 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c"
Dec 05 05:53:31 crc kubenswrapper[4742]: I1205 05:53:31.396124 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:31 crc kubenswrapper[4742]: I1205 05:53:31.396197 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:31 crc kubenswrapper[4742]: I1205 05:53:31.396219 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:31 crc kubenswrapper[4742]: I1205 05:53:31.396251 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:31 crc kubenswrapper[4742]: I1205 05:53:31.396274 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:31Z","lastTransitionTime":"2025-12-05T05:53:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 05:53:33 crc kubenswrapper[4742]: I1205 05:53:33.382164 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 05:53:33 crc kubenswrapper[4742]: I1205 05:53:33.382254 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 05:53:33 crc kubenswrapper[4742]: E1205 05:53:33.382334 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 05:53:33 crc kubenswrapper[4742]: I1205 05:53:33.382276 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:53:33 crc kubenswrapper[4742]: I1205 05:53:33.382254 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 05:53:33 crc kubenswrapper[4742]: E1205 05:53:33.382428 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 05:53:33 crc kubenswrapper[4742]: E1205 05:53:33.382515 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c"
Dec 05 05:53:33 crc kubenswrapper[4742]: E1205 05:53:33.382625 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 05:53:33 crc kubenswrapper[4742]: I1205 05:53:33.460550 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:33 crc kubenswrapper[4742]: I1205 05:53:33.460642 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:33 crc kubenswrapper[4742]: I1205 05:53:33.460671 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:33 crc kubenswrapper[4742]: I1205 05:53:33.460714 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:33 crc kubenswrapper[4742]: I1205 05:53:33.460757 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:33Z","lastTransitionTime":"2025-12-05T05:53:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.293333 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.293400 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.293420 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.293445 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.293462 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:34Z","lastTransitionTime":"2025-12-05T05:53:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.397580 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.397998 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.398016 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.398036 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.398074 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:34Z","lastTransitionTime":"2025-12-05T05:53:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.398091 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b1b19a-ac1a-4884-b500-1a4e5d2ff816\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7081c0c1a2a04736b851b2891cf22d96332e3361d93479f3fae43034a9fff212\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.420741 4742 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:
52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.442354 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.462379 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.484485 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.500787 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.500906 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.500926 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.500950 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.500967 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:34Z","lastTransitionTime":"2025-12-05T05:53:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.503559 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.524529 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.542941 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.561262 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.577660 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6fb3a766-b56c-4b6c-a09e-3666df4accb8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f54b5f75e10a5fd9f43eec7433614b23bef72beb32ff9028155852c09d9b2e7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c03a8d713a4b0a06c47eb28b20328d66f0f3475b56a16ded6f429dd6648e13a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13e953bfbd6033682f3959815eedf4f814d275be3391564618723c3491faee3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.594809 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.603923 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.603971 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.603991 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.604018 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.604035 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:34Z","lastTransitionTime":"2025-12-05T05:53:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.620769 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.646877 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.680507 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:19Z\\\",\\\"message\\\":\\\"1.EgressIP event handler 8\\\\nI1205 05:53:19.555770 6722 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 05:53:19.555814 6722 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 05:53:19.555883 6722 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 05:53:19.555890 6722 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 05:53:19.555896 6722 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 05:53:19.555919 6722 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 05:53:19.555919 6722 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 05:53:19.555929 6722 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 05:53:19.555946 6722 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 05:53:19.555940 6722 factory.go:656] Stopping watch factory\\\\nI1205 05:53:19.555956 6722 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 05:53:19.556100 6722 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1205 05:53:19.556186 6722 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1205 05:53:19.556222 6722 ovnkube.go:599] Stopped ovnkube\\\\nI1205 05:53:19.556245 6722 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 05:53:19.556306 6722 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:53:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.697697 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.707098 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.707144 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.707159 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.707180 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.707195 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:34Z","lastTransitionTime":"2025-12-05T05:53:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.721302 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:10Z\\\",\\\"message\\\":\\\"2025-12-05T05:52:24+00:00 
[cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5\\\\n2025-12-05T05:52:24+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5 to /host/opt/cni/bin/\\\\n2025-12-05T05:52:25Z [verbose] multus-daemon started\\\\n2025-12-05T05:52:25Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:53:10Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:53:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.736270 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.750076 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 
05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.764394 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:34Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.809595 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.809648 4742 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.809665 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.809689 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.809706 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:34Z","lastTransitionTime":"2025-12-05T05:53:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.912958 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.913015 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.913034 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.913083 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:34 crc kubenswrapper[4742]: I1205 05:53:34.913102 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:34Z","lastTransitionTime":"2025-12-05T05:53:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.016511 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.016578 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.016597 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.016622 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.016639 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:35Z","lastTransitionTime":"2025-12-05T05:53:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.119842 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.119928 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.119952 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.119980 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.119997 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:35Z","lastTransitionTime":"2025-12-05T05:53:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.222686 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.222726 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.222737 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.222753 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.222766 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:35Z","lastTransitionTime":"2025-12-05T05:53:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.325633 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.325696 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.325715 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.325742 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.325761 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:35Z","lastTransitionTime":"2025-12-05T05:53:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.382339 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.382411 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.382339 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:35 crc kubenswrapper[4742]: E1205 05:53:35.382551 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:35 crc kubenswrapper[4742]: E1205 05:53:35.382707 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.382939 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:35 crc kubenswrapper[4742]: E1205 05:53:35.382966 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:35 crc kubenswrapper[4742]: E1205 05:53:35.383397 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.428713 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.428767 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.428786 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.428809 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.428831 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:35Z","lastTransitionTime":"2025-12-05T05:53:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.532128 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.532214 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.532223 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.532235 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.532244 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:35Z","lastTransitionTime":"2025-12-05T05:53:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.635344 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.635413 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.635447 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.635484 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.635507 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:35Z","lastTransitionTime":"2025-12-05T05:53:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.739250 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.739310 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.739323 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.739345 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.739359 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:35Z","lastTransitionTime":"2025-12-05T05:53:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.842232 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.842274 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.842285 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.842328 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.842341 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:35Z","lastTransitionTime":"2025-12-05T05:53:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.945249 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.945299 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.945315 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.945336 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:35 crc kubenswrapper[4742]: I1205 05:53:35.945353 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:35Z","lastTransitionTime":"2025-12-05T05:53:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.048391 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.048448 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.048460 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.048488 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.048503 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:36Z","lastTransitionTime":"2025-12-05T05:53:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.152242 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.152308 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.152328 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.152352 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.152370 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:36Z","lastTransitionTime":"2025-12-05T05:53:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.254877 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.254957 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.254976 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.255003 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.255027 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:36Z","lastTransitionTime":"2025-12-05T05:53:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.358556 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.358622 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.358641 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.358665 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.358681 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:36Z","lastTransitionTime":"2025-12-05T05:53:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.383411 4742 scope.go:117] "RemoveContainer" containerID="d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251" Dec 05 05:53:36 crc kubenswrapper[4742]: E1205 05:53:36.383692 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\"" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.461995 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.462091 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.462110 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.462138 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.462158 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:36Z","lastTransitionTime":"2025-12-05T05:53:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.565268 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.565337 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.565356 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.565387 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.565403 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:36Z","lastTransitionTime":"2025-12-05T05:53:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.669167 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.669232 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.669249 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.669281 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.669301 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:36Z","lastTransitionTime":"2025-12-05T05:53:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.773357 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.773412 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.773426 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.773447 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.773466 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:36Z","lastTransitionTime":"2025-12-05T05:53:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.876656 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.876729 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.876751 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.876781 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.876805 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:36Z","lastTransitionTime":"2025-12-05T05:53:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.979618 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.979794 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.979862 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.979891 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:36 crc kubenswrapper[4742]: I1205 05:53:36.979912 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:36Z","lastTransitionTime":"2025-12-05T05:53:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.082971 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.083044 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.083077 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.083096 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.083109 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:37Z","lastTransitionTime":"2025-12-05T05:53:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.186821 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.186903 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.186922 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.186952 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.186973 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:37Z","lastTransitionTime":"2025-12-05T05:53:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.290135 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.290186 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.290203 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.290226 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.290244 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:37Z","lastTransitionTime":"2025-12-05T05:53:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.382239 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.382357 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.382398 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:37 crc kubenswrapper[4742]: E1205 05:53:37.382525 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.382578 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:37 crc kubenswrapper[4742]: E1205 05:53:37.382740 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:37 crc kubenswrapper[4742]: E1205 05:53:37.382845 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:37 crc kubenswrapper[4742]: E1205 05:53:37.382941 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.393269 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.393348 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.393387 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.393424 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.393447 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:37Z","lastTransitionTime":"2025-12-05T05:53:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.496523 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.496582 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.496605 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.496633 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.496652 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:37Z","lastTransitionTime":"2025-12-05T05:53:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.600148 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.600236 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.600256 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.600282 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.600300 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:37Z","lastTransitionTime":"2025-12-05T05:53:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.703552 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.703612 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.703630 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.703652 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.703670 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:37Z","lastTransitionTime":"2025-12-05T05:53:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.806076 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.806115 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.806128 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.806142 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.806153 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:37Z","lastTransitionTime":"2025-12-05T05:53:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.886763 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.886813 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.886832 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.886859 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.886897 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:37Z","lastTransitionTime":"2025-12-05T05:53:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:37 crc kubenswrapper[4742]: E1205 05:53:37.906876 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:37Z is after 2025-08-24T17:21:41Z"
event="NodeHasNoDiskPressure" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.912656 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.912681 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.912698 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:37Z","lastTransitionTime":"2025-12-05T05:53:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:37 crc kubenswrapper[4742]: E1205 05:53:37.931955 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.936426 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.936477 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.936495 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.936516 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.936533 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:37Z","lastTransitionTime":"2025-12-05T05:53:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:37 crc kubenswrapper[4742]: E1205 05:53:37.955969 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.960821 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.960882 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.960899 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.960923 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.960939 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:37Z","lastTransitionTime":"2025-12-05T05:53:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:37 crc kubenswrapper[4742]: E1205 05:53:37.981308 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:37Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.985887 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.985976 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
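Every patch failure in this burst has the same proximate cause: the node.network-node-identity.openshift.io webhook serving on 127.0.0.1:9743 presents a certificate that expired 2025-08-24T17:21:41Z, while the node clock reads 2025-12-05. A minimal way to confirm the validity window from the node itself, assuming shell access and an openssl binary (neither of which this log shows), is:

    # Print the validity window of the certificate served on the webhook port.
    # For this log we would expect: notAfter=Aug 24 17:21:41 2025 GMT (expired).
    openssl s_client -connect 127.0.0.1:9743 </dev/null 2>/dev/null \
      | openssl x509 -noout -dates

On a CRC VM that has sat powered off for months, expired internal certificates are common; they are normally repaired by letting the cluster's own certificate rotation run at startup rather than by replacing files by hand.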
event="NodeHasNoDiskPressure" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.985997 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.986080 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:37 crc kubenswrapper[4742]: I1205 05:53:37.986104 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:37Z","lastTransitionTime":"2025-12-05T05:53:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:38 crc kubenswrapper[4742]: E1205 05:53:38.005742 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:38Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:38 crc kubenswrapper[4742]: E1205 05:53:38.005959 4742 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.008162 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
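"update node status exceeds retry count" marks the end of one sync attempt: in the upstream kubelet sources, tryUpdateNodeStatus is retried nodeStatusUpdateRetry (5) times in quick succession (the attempts at 05:53:37.96, 05:53:37.98 and 05:53:38.00 above are one such burst) before the kubelet gives up until the next node-status sync. While the webhook rejects every patch, the API server keeps the node's stale conditions. Given a working admin kubeconfig (an assumption; this log shows none), the condition the kubelet is failing to update can be read directly:

    # Show the Ready condition currently stored for the node.
    # 'oc' can be substituted for 'kubectl' on OpenShift.
    kubectl get node crc \
      -o jsonpath='{.status.conditions[?(@.type=="Ready")].reason}{"\n"}'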
event="NodeHasSufficientMemory" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.008209 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.008226 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.008250 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.008268 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:38Z","lastTransitionTime":"2025-12-05T05:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.111121 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.111193 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.111219 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.111247 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.111269 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:38Z","lastTransitionTime":"2025-12-05T05:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.214209 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.214260 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.214276 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.214299 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.214317 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:38Z","lastTransitionTime":"2025-12-05T05:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.317372 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.317686 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.317858 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.318006 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.318220 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:38Z","lastTransitionTime":"2025-12-05T05:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.421030 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.421108 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.421121 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.421141 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.421153 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:38Z","lastTransitionTime":"2025-12-05T05:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.523409 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.523815 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.524024 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.524299 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.524516 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:38Z","lastTransitionTime":"2025-12-05T05:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.627720 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.627791 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.627810 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.627834 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.627853 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:38Z","lastTransitionTime":"2025-12-05T05:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.730657 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.730714 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.730730 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.730753 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.730770 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:38Z","lastTransitionTime":"2025-12-05T05:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.833737 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.833784 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.833801 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.833831 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.833855 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:38Z","lastTransitionTime":"2025-12-05T05:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.937349 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.937408 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.937425 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.937448 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:38 crc kubenswrapper[4742]: I1205 05:53:38.937464 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:38Z","lastTransitionTime":"2025-12-05T05:53:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.051510 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.051557 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.051569 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.051588 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.051601 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:39Z","lastTransitionTime":"2025-12-05T05:53:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.154928 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.154995 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.155024 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.155050 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.155128 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:39Z","lastTransitionTime":"2025-12-05T05:53:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.382017 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.382112 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.382154 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.382071 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 05:53:39 crc kubenswrapper[4742]: E1205 05:53:39.382293 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 05:53:39 crc kubenswrapper[4742]: E1205 05:53:39.382463 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 05:53:39 crc kubenswrapper[4742]: E1205 05:53:39.382597 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c"
Dec 05 05:53:39 crc kubenswrapper[4742]: E1205 05:53:39.382681 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.464713 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.464812 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.464836 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.464867 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:39 crc kubenswrapper[4742]: I1205 05:53:39.464890 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:39Z","lastTransitionTime":"2025-12-05T05:53:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
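The four pods above are exactly the non-host-network pods on this node, and they cannot get sandboxes for the same reason the node reports NotReady: the kubelet finds no CNI network config in /etc/kubernetes/cni/net.d/. The plugin here is OVN-Kubernetes (the failing node-identity webhook is part of it), and its node agent writes the config file into that directory only once it is running, which in this log appears to be blocked by the same expired certificate. A quick check from the node, plus a sketch of the general shape of a CNI network config (illustrative only, not the exact file OVN-Kubernetes writes), assuming shell access:

    # Kubelet watches this directory; NetworkReady stays false while it is empty.
    ls -l /etc/kubernetes/cni/net.d/
    # A CNI network config is a small JSON document of roughly this form.
    # Written to /tmp purely as an illustration; do not drop it into net.d.
    cat > /tmp/example-cni.conf <<'EOF'
    {
      "cniVersion": "0.4.0",
      "name": "ovn-kubernetes",
      "type": "ovn-k8s-cni-overlay"
    }
    EOF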
[... the five-entry NotReady cycle continues unchanged at 05:53:39.567, .670, .773, .877, .980, 05:53:40.083, .186, .289, .392, .495, .598 ...]
Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.701809 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.701868 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.701885 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.701909 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.701926 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:40Z","lastTransitionTime":"2025-12-05T05:53:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.805009 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.805121 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.805146 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.805176 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.805200 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:40Z","lastTransitionTime":"2025-12-05T05:53:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.908248 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.908319 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.908342 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.908374 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:40 crc kubenswrapper[4742]: I1205 05:53:40.908393 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:40Z","lastTransitionTime":"2025-12-05T05:53:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.011486 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.011558 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.011580 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.011610 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.011632 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:41Z","lastTransitionTime":"2025-12-05T05:53:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.115524 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.115624 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.115643 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.115666 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.115686 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:41Z","lastTransitionTime":"2025-12-05T05:53:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.219000 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.219123 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.219149 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.219181 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.219205 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:41Z","lastTransitionTime":"2025-12-05T05:53:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.322522 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.322603 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.322628 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.322661 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.322684 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:41Z","lastTransitionTime":"2025-12-05T05:53:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
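The KubeletNotReady condition above persists because the kubelet finds nothing to load in /etc/kubernetes/cni/net.d/. As a rough sketch only (the kubelet's real check goes through the libcni loader, not this code), the NetworkReady probe reduces to a directory scan like the following:

    // cnicheck.go: approximate, illustrative stand-in for the kubelet's
    // NetworkReady probe. It reports ready if the CNI conf directory named
    // in the log contains any file with an extension libcni accepts.
    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    func main() {
    	confDir := "/etc/kubernetes/cni/net.d" // directory from the log message
    	entries, err := os.ReadDir(confDir)
    	if err != nil {
    		fmt.Printf("NetworkReady=false: %v\n", err)
    		return
    	}
    	for _, e := range entries {
    		switch filepath.Ext(e.Name()) {
    		case ".conf", ".conflist", ".json": // extensions the CNI loader accepts
    			fmt.Printf("NetworkReady=true: found %s\n", e.Name())
    			return
    		}
    	}
    	fmt.Println("NetworkReady=false: no CNI configuration file found")
    }

On this node the scan would come up empty, which is exactly the "no CNI configuration file" message repeated through the heartbeats above; the condition clears once the network provider writes its conflist into that directory.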
Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.382112 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.382189 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.382202 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.382149 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:53:41 crc kubenswrapper[4742]: E1205 05:53:41.382381 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 05:53:41 crc kubenswrapper[4742]: E1205 05:53:41.382560 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 05:53:41 crc kubenswrapper[4742]: E1205 05:53:41.382712 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c"
Dec 05 05:53:41 crc kubenswrapper[4742]: E1205 05:53:41.382856 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[The five-record NotReady cycle repeats at 05:53:41.426, .529, .633, .736 and .840.]
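The sandbox-creation skips above are the pod-level face of the same condition: pod workers refuse to sync these pods while the runtime network is not ready. A minimal client-go sketch (illustrative, not part of this log's tooling; the kubeconfig path is an assumption, while the node name "crc" comes from the log) that reads back the Ready condition the setters.go records are writing:

    // readycheck.go: fetch node "crc" and print its Ready condition,
    // i.e. the same condition the kubelet keeps setting to False above.
    package main

    import (
    	"context"
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	// Assumed kubeconfig location; adjust for the environment at hand.
    	cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
    	if err != nil {
    		panic(err)
    	}
    	cs := kubernetes.NewForConfigOrDie(cfg)
    	node, err := cs.CoreV1().Nodes().Get(context.TODO(), "crc", metav1.GetOptions{})
    	if err != nil {
    		panic(err)
    	}
    	for _, c := range node.Status.Conditions {
    		if c.Type == corev1.NodeReady {
    			fmt.Printf("Ready=%s reason=%s message=%s\n", c.Status, c.Reason, c.Message)
    		}
    	}
    }

Against this node it would print Ready=False with reason KubeletNotReady and the CNI message seen throughout the log.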
Dec 05 05:53:41 crc kubenswrapper[4742]: I1205 05:53:41.907519 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs\") pod \"network-metrics-daemon-pbtb4\" (UID: \"b69352e1-2d48-4211-83e1-25d09fff9d3c\") " pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:53:41 crc kubenswrapper[4742]: E1205 05:53:41.907841 4742 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 05:53:41 crc kubenswrapper[4742]: E1205 05:53:41.907998 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs podName:b69352e1-2d48-4211-83e1-25d09fff9d3c nodeName:}" failed. No retries permitted until 2025-12-05 05:54:45.907966528 +0000 UTC m=+161.820101630 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs") pod "network-metrics-daemon-pbtb4" (UID: "b69352e1-2d48-4211-83e1-25d09fff9d3c") : object "openshift-multus"/"metrics-daemon-secret" not registered
[The five-record NotReady cycle repeats at roughly 100 ms intervals from 05:53:41.943 through 05:53:43.292.]
Dec 05 05:53:43 crc kubenswrapper[4742]: I1205 05:53:43.382496 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 05:53:43 crc kubenswrapper[4742]: I1205 05:53:43.382496 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:53:43 crc kubenswrapper[4742]: E1205 05:53:43.382653 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 05:53:43 crc kubenswrapper[4742]: I1205 05:53:43.382726 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 05:53:43 crc kubenswrapper[4742]: E1205 05:53:43.382910 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c"
Dec 05 05:53:43 crc kubenswrapper[4742]: I1205 05:53:43.383095 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 05:53:43 crc kubenswrapper[4742]: E1205 05:53:43.383174 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 05:53:43 crc kubenswrapper[4742]: E1205 05:53:43.383051 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[The five-record NotReady cycle repeats at 05:53:43.396 and 05:53:43.500.]
[The five-record NotReady cycle repeats at roughly 100 ms intervals from 05:53:43.604 through 05:53:44.332.]
Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.400547 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6fb3a766-b56c-4b6c-a09e-3666df4accb8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f54b5f75e10a5fd9f43eec7433614b23bef72beb32ff9028155852c09d9b2e7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c03a8d713a4b0a06c47eb28b20328d66f0f3475b56a16ded6f429dd6648e13a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13e953bfbd6033682f3959815eedf4f814d275be3391564618723c3491faee3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z"
Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.422967 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z"
Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.435844 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.435945 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.435976 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.436011 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.436034 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:44Z","lastTransitionTime":"2025-12-05T05:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.449267 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.477538 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.514748 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:19Z\\\",\\\"message\\\":\\\"1.EgressIP event handler 8\\\\nI1205 05:53:19.555770 6722 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 05:53:19.555814 6722 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 05:53:19.555883 6722 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 05:53:19.555890 6722 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 05:53:19.555896 6722 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 05:53:19.555919 6722 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 05:53:19.555919 6722 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 05:53:19.555929 6722 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 05:53:19.555946 6722 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 05:53:19.555940 6722 factory.go:656] Stopping watch factory\\\\nI1205 05:53:19.555956 6722 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 05:53:19.556100 6722 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1205 05:53:19.556186 6722 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1205 05:53:19.556222 6722 ovnkube.go:599] Stopped ovnkube\\\\nI1205 05:53:19.556245 6722 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 05:53:19.556306 6722 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:53:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.533410 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.538693 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.538758 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.538783 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.538814 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.538835 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:44Z","lastTransitionTime":"2025-12-05T05:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.556042 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:10Z\\\",\\\"message\\\":\\\"2025-12-05T05:52:24+00:00 
[cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5\\\\n2025-12-05T05:52:24+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5 to /host/opt/cni/bin/\\\\n2025-12-05T05:52:25Z [verbose] multus-daemon started\\\\n2025-12-05T05:52:25Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:53:10Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:53:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.575185 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.596036 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 
05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.616966 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.636989 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b1b19a-ac1a-4884-b500-1a4e5d2ff816\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7081c0c1a2a04736b851b2891cf22d96332e3361d93479f3fae43034a9fff212\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.642540 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.642576 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.642617 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.642634 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.642647 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:44Z","lastTransitionTime":"2025-12-05T05:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.675486 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.698769 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.718398 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.738324 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.745837 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.745892 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.745913 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.745940 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.745958 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:44Z","lastTransitionTime":"2025-12-05T05:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.758559 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.779611 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.795813 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.815024 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:44Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.849389 4742 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.849474 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.849501 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.849540 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.849563 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:44Z","lastTransitionTime":"2025-12-05T05:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.952800 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.952871 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.952889 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.952914 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:44 crc kubenswrapper[4742]: I1205 05:53:44.952938 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:44Z","lastTransitionTime":"2025-12-05T05:53:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.055386 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.055449 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.055474 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.055503 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.055526 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:45Z","lastTransitionTime":"2025-12-05T05:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.159583 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.159670 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.159690 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.159720 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.159758 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:45Z","lastTransitionTime":"2025-12-05T05:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.263397 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.263547 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.263579 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.263616 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.263639 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:45Z","lastTransitionTime":"2025-12-05T05:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.367908 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.368098 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.368137 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.368168 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.368185 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:45Z","lastTransitionTime":"2025-12-05T05:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.382423 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.382506 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.382534 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.382717 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:45 crc kubenswrapper[4742]: E1205 05:53:45.382695 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:45 crc kubenswrapper[4742]: E1205 05:53:45.382933 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:45 crc kubenswrapper[4742]: E1205 05:53:45.383048 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:45 crc kubenswrapper[4742]: E1205 05:53:45.383227 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.471675 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.471729 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.471748 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.471772 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.471789 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:45Z","lastTransitionTime":"2025-12-05T05:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.574411 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.574478 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.574496 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.574521 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.574540 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:45Z","lastTransitionTime":"2025-12-05T05:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.677750 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.677802 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.677821 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.677846 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.677863 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:45Z","lastTransitionTime":"2025-12-05T05:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.780836 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.780925 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.780951 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.780989 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.781014 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:45Z","lastTransitionTime":"2025-12-05T05:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.884421 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.884493 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.884516 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.884546 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.884571 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:45Z","lastTransitionTime":"2025-12-05T05:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.987675 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.987731 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.987748 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.987772 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:45 crc kubenswrapper[4742]: I1205 05:53:45.987789 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:45Z","lastTransitionTime":"2025-12-05T05:53:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.090689 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.090764 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.090784 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.090815 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.090835 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:46Z","lastTransitionTime":"2025-12-05T05:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.193657 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.193728 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.193749 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.193774 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.193794 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:46Z","lastTransitionTime":"2025-12-05T05:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.297749 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.297809 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.297822 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.297846 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.297858 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:46Z","lastTransitionTime":"2025-12-05T05:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.400388 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.400453 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.400472 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.400497 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.400513 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:46Z","lastTransitionTime":"2025-12-05T05:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.503879 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.503953 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.503969 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.503996 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.504015 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:46Z","lastTransitionTime":"2025-12-05T05:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.608660 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.608739 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.608761 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.608790 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.608810 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:46Z","lastTransitionTime":"2025-12-05T05:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.711481 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.711555 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.711577 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.711609 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.711634 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:46Z","lastTransitionTime":"2025-12-05T05:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.815697 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.815744 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.815760 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.815783 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.815816 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:46Z","lastTransitionTime":"2025-12-05T05:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.919985 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.920106 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.920121 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.920142 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:46 crc kubenswrapper[4742]: I1205 05:53:46.920157 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:46Z","lastTransitionTime":"2025-12-05T05:53:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.023348 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.023437 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.023463 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.023514 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.023531 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:47Z","lastTransitionTime":"2025-12-05T05:53:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.127245 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.127518 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.127537 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.127556 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.127572 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:47Z","lastTransitionTime":"2025-12-05T05:53:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.230801 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.230877 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.230897 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.230921 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.230939 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:47Z","lastTransitionTime":"2025-12-05T05:53:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.333081 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.333122 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.333137 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.333157 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.333174 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:47Z","lastTransitionTime":"2025-12-05T05:53:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.382336 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.382406 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.382419 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:47 crc kubenswrapper[4742]: E1205 05:53:47.382552 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.382643 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:47 crc kubenswrapper[4742]: E1205 05:53:47.382796 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:47 crc kubenswrapper[4742]: E1205 05:53:47.382878 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:47 crc kubenswrapper[4742]: E1205 05:53:47.383052 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.436576 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.436626 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.436643 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.436667 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.436686 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:47Z","lastTransitionTime":"2025-12-05T05:53:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.539963 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.540049 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.540105 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.540140 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.540167 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:47Z","lastTransitionTime":"2025-12-05T05:53:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.643782 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.643841 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.643857 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.643880 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.643897 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:47Z","lastTransitionTime":"2025-12-05T05:53:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.747456 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.747514 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.747531 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.747555 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.747575 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:47Z","lastTransitionTime":"2025-12-05T05:53:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.850663 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.850724 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.850747 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.850781 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.850807 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:47Z","lastTransitionTime":"2025-12-05T05:53:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.953804 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.953862 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.953879 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.953903 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:47 crc kubenswrapper[4742]: I1205 05:53:47.953920 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:47Z","lastTransitionTime":"2025-12-05T05:53:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.057683 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.057790 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.057809 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.057838 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.057855 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:48Z","lastTransitionTime":"2025-12-05T05:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.111764 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.111829 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.111848 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.111872 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.111890 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:48Z","lastTransitionTime":"2025-12-05T05:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:48 crc kubenswrapper[4742]: E1205 05:53:48.131321 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.136428 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.136489 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.136515 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.136546 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.136570 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:48Z","lastTransitionTime":"2025-12-05T05:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:48 crc kubenswrapper[4742]: E1205 05:53:48.157160 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.161875 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.161928 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.161953 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.161980 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.162001 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:48Z","lastTransitionTime":"2025-12-05T05:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:48 crc kubenswrapper[4742]: E1205 05:53:48.180604 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.186244 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.186346 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.186366 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.186390 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.186407 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:48Z","lastTransitionTime":"2025-12-05T05:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:48 crc kubenswrapper[4742]: E1205 05:53:48.208155 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.212906 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.212952 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.212967 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.212990 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.213006 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:48Z","lastTransitionTime":"2025-12-05T05:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:48 crc kubenswrapper[4742]: E1205 05:53:48.231551 4742 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"665ecdae-ddb3-49af-8a22-677c4e53c8f0\\\",\\\"systemUUID\\\":\\\"f65f0fdf-abb3-4467-8810-c82e92a7b58d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:48Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:48 crc kubenswrapper[4742]: E1205 05:53:48.231705 4742 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.233967 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
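[Note: all three status-patch attempts above fail identically. The patch is rejected because the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 presents a certificate that expired on 2025-08-24T17:21:41Z, months before the node clock's 2025-12-05 time. A minimal Go sketch of one way to inspect the served certificate's validity window; the address is taken from the log, everything else is illustrative and not part of the kubelet:]

    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
        "time"
    )

    func main() {
        // Address taken from the kubelet errors above; that the webhook is
        // still listening there when this runs is an assumption.
        const addr = "127.0.0.1:9743"

        // InsecureSkipVerify because the point is to inspect the served
        // certificate, not to trust it.
        conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            log.Fatalf("dial %s: %v", addr, err)
        }
        defer conn.Close()

        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Printf("subject:   %s\n", cert.Subject)
        fmt.Printf("notBefore: %s\n", cert.NotBefore)
        fmt.Printf("notAfter:  %s\n", cert.NotAfter)
        if time.Now().After(cert.NotAfter) {
            // Matches the x509 "certificate has expired" failure in the log.
            fmt.Println("certificate is expired relative to this machine's clock")
        }
    }

[Run against the live endpoint, the notAfter line would be expected to print the same 2025-08-24T17:21:41Z instant quoted in the x509 errors.]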
event="NodeHasSufficientMemory" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.233994 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.234004 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.234032 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.234043 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:48Z","lastTransitionTime":"2025-12-05T05:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.337862 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.337938 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.337957 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.337982 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.338000 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:48Z","lastTransitionTime":"2025-12-05T05:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.440952 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.441005 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.441024 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.441044 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.441098 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:48Z","lastTransitionTime":"2025-12-05T05:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.549958 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.550094 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.550128 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.550165 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.550190 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:48Z","lastTransitionTime":"2025-12-05T05:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.653239 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.653304 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.653328 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.653369 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.653394 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:48Z","lastTransitionTime":"2025-12-05T05:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.755953 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.756005 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.756017 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.756035 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.756048 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:48Z","lastTransitionTime":"2025-12-05T05:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.859218 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.859280 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.859300 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.859324 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.859342 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:48Z","lastTransitionTime":"2025-12-05T05:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.962916 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.962989 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.963008 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.963032 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:48 crc kubenswrapper[4742]: I1205 05:53:48.963051 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:48Z","lastTransitionTime":"2025-12-05T05:53:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.066444 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.066524 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.066542 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.066567 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.066585 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:49Z","lastTransitionTime":"2025-12-05T05:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.169735 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.169807 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.169823 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.169853 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.169871 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:49Z","lastTransitionTime":"2025-12-05T05:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.272389 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.272457 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.272479 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.272507 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.272530 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:49Z","lastTransitionTime":"2025-12-05T05:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.374725 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.374794 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.374812 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.374841 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.374861 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:49Z","lastTransitionTime":"2025-12-05T05:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.382097 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.382202 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.382106 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.382203 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:49 crc kubenswrapper[4742]: E1205 05:53:49.382276 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:49 crc kubenswrapper[4742]: E1205 05:53:49.382372 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:49 crc kubenswrapper[4742]: E1205 05:53:49.383229 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:49 crc kubenswrapper[4742]: E1205 05:53:49.383308 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.383815 4742 scope.go:117] "RemoveContainer" containerID="d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251" Dec 05 05:53:49 crc kubenswrapper[4742]: E1205 05:53:49.384201 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\"" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.476785 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.476853 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.476873 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.476897 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.476914 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:49Z","lastTransitionTime":"2025-12-05T05:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.579632 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.579677 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.579689 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.579705 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.579717 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:49Z","lastTransitionTime":"2025-12-05T05:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.682609 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.682676 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.682692 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.682719 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.682739 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:49Z","lastTransitionTime":"2025-12-05T05:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.785741 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.785817 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.785840 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.785870 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.785891 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:49Z","lastTransitionTime":"2025-12-05T05:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.888732 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.888814 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.888833 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.888859 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.888881 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:49Z","lastTransitionTime":"2025-12-05T05:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.992125 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.992255 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.992268 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.992285 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:49 crc kubenswrapper[4742]: I1205 05:53:49.992296 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:49Z","lastTransitionTime":"2025-12-05T05:53:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.095815 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.095898 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.095919 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.095945 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.095966 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:50Z","lastTransitionTime":"2025-12-05T05:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.206285 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.206352 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.206368 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.206395 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.206417 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:50Z","lastTransitionTime":"2025-12-05T05:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.309093 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.309149 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.309169 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.309195 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.309218 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:50Z","lastTransitionTime":"2025-12-05T05:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.411630 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.412104 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.412249 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.412401 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.412540 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:50Z","lastTransitionTime":"2025-12-05T05:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.515312 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.515387 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.515418 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.515437 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.515449 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:50Z","lastTransitionTime":"2025-12-05T05:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.618197 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.618514 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.618811 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.619179 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.619481 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:50Z","lastTransitionTime":"2025-12-05T05:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.722872 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.722997 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.723015 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.723037 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.723086 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:50Z","lastTransitionTime":"2025-12-05T05:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.825271 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.825628 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.825858 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.826165 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.826408 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:50Z","lastTransitionTime":"2025-12-05T05:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.929874 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.929948 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.929970 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.930001 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:50 crc kubenswrapper[4742]: I1205 05:53:50.930021 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:50Z","lastTransitionTime":"2025-12-05T05:53:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.036997 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.037149 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.037187 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.037222 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.037247 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:51Z","lastTransitionTime":"2025-12-05T05:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.140553 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.140608 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.140625 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.140646 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.140663 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:51Z","lastTransitionTime":"2025-12-05T05:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.243368 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.243445 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.243465 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.243491 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.243511 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:51Z","lastTransitionTime":"2025-12-05T05:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.346349 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.346421 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.346447 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.346477 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.346501 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:51Z","lastTransitionTime":"2025-12-05T05:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.382172 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.382256 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.382262 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:51 crc kubenswrapper[4742]: E1205 05:53:51.382371 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.382496 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:51 crc kubenswrapper[4742]: E1205 05:53:51.382602 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:51 crc kubenswrapper[4742]: E1205 05:53:51.382706 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:51 crc kubenswrapper[4742]: E1205 05:53:51.382808 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.449511 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.449574 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.449593 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.449616 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.449634 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:51Z","lastTransitionTime":"2025-12-05T05:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.552395 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.552482 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.552503 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.552525 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.552541 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:51Z","lastTransitionTime":"2025-12-05T05:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.655894 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.656277 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.656402 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.656558 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.656676 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:51Z","lastTransitionTime":"2025-12-05T05:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.759740 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.759823 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.759848 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.759877 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.759901 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:51Z","lastTransitionTime":"2025-12-05T05:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.862903 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.862979 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.863003 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.863039 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.863096 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:51Z","lastTransitionTime":"2025-12-05T05:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.965966 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.966026 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.966049 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.966129 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:51 crc kubenswrapper[4742]: I1205 05:53:51.966152 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:51Z","lastTransitionTime":"2025-12-05T05:53:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.068289 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.068322 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.068330 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.068342 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.068351 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:52Z","lastTransitionTime":"2025-12-05T05:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.171681 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.172180 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.172347 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.172489 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.172622 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:52Z","lastTransitionTime":"2025-12-05T05:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.276405 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.276868 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.277116 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.277369 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.277518 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:52Z","lastTransitionTime":"2025-12-05T05:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.381027 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.381119 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.381137 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.381166 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.381181 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:52Z","lastTransitionTime":"2025-12-05T05:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.484382 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.484434 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.484453 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.484475 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.484491 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:52Z","lastTransitionTime":"2025-12-05T05:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.587418 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.587472 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.587490 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.587512 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.587530 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:52Z","lastTransitionTime":"2025-12-05T05:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.691309 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.691409 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.691427 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.691452 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.691472 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:52Z","lastTransitionTime":"2025-12-05T05:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.795407 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.795759 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.795906 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.796033 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.796234 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:52Z","lastTransitionTime":"2025-12-05T05:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.899431 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.899910 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.900052 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.900272 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:52 crc kubenswrapper[4742]: I1205 05:53:52.900404 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:52Z","lastTransitionTime":"2025-12-05T05:53:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.003337 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.003398 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.003416 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.003439 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.003456 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:53Z","lastTransitionTime":"2025-12-05T05:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.106225 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.106279 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.106295 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.106318 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.106335 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:53Z","lastTransitionTime":"2025-12-05T05:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.208891 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.208957 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.208974 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.208998 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.209015 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:53Z","lastTransitionTime":"2025-12-05T05:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.311918 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.311978 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.311994 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.312018 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.312035 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:53Z","lastTransitionTime":"2025-12-05T05:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.382320 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.382328 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.382374 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.382491 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:53 crc kubenswrapper[4742]: E1205 05:53:53.382654 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:53 crc kubenswrapper[4742]: E1205 05:53:53.382807 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:53 crc kubenswrapper[4742]: E1205 05:53:53.383194 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:53 crc kubenswrapper[4742]: E1205 05:53:53.383554 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.419880 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.419962 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.419988 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.420020 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.420043 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:53Z","lastTransitionTime":"2025-12-05T05:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.523943 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.524018 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.524052 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.524134 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.524166 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:53Z","lastTransitionTime":"2025-12-05T05:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.626717 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.626777 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.626813 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.626841 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.626866 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:53Z","lastTransitionTime":"2025-12-05T05:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.729511 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.729585 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.729618 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.729649 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.729670 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:53Z","lastTransitionTime":"2025-12-05T05:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.832572 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.832645 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.832734 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.832765 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.832787 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:53Z","lastTransitionTime":"2025-12-05T05:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.936553 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.936733 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.936757 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.936786 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:53 crc kubenswrapper[4742]: I1205 05:53:53.936806 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:53Z","lastTransitionTime":"2025-12-05T05:53:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.041928 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.041992 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.042010 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.042041 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.042097 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:54Z","lastTransitionTime":"2025-12-05T05:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.145306 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.145393 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.145428 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.145458 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.145476 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:54Z","lastTransitionTime":"2025-12-05T05:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.248196 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.248582 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.248734 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.248881 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.249044 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:54Z","lastTransitionTime":"2025-12-05T05:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.352381 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.352451 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.352471 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.352496 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.352521 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:54Z","lastTransitionTime":"2025-12-05T05:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.397772 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a5b1b19a-ac1a-4884-b500-1a4e5d2ff816\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7081c0c1a2a04736b851b2891cf22d96332e3361d93479f3fae43034a9fff212\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a3232dd64908cc76a49f558975766eaf5423bc83eeb5e4ebd6303600dd0b7bea\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.430323 4742 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2d12578-a632-459f-bcb0-a22792d3a64f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5151439ec0c0bfe441f395bf7b8c80a67e3b690b60964a2023c94c58662de71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a08b588e2a1752578808b9444f946c047b0082212ef49995fb457c8c9c1ab3bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd50bd129fc780dd8fbc4fec81d0ea9fedeaedc564fbe3493fe01c6e3b229117\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb5cd90673e38801212aaba4b73882bf58b469ab57539483ecc741100b372a7b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0312a5eee94350c82a3433635bbf705975419ff773aa7f3e3c9c14a49bf1bfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6deb6ec6baa448c8b85db21f36a46925d4881ecc6c6ac550e4594d8547006f4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2c02f7e6aea0ebfcd1bed7803d4f84cd1d33af1700f666f8130c1afc97b0d98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:
52:05Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d56567adaa16a1575772ac302c8709812fec3a99278bda7f615b1240c44e2b70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.449841 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5ea18901-cdec-4f7a-96c3-610bb6b9eef5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T05:52:22Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 05:52:16.890522 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 05:52:16.891705 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2498818554/tls.crt::/tmp/serving-cert-2498818554/tls.key\\\\\\\"\\\\nI1205 05:52:22.727364 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 05:52:22.729903 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 05:52:22.729926 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 05:52:22.729947 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 05:52:22.729967 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 05:52:22.744191 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 05:52:22.744208 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 05:52:22.744230 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744236 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 05:52:22.744270 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 05:52:22.744275 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 05:52:22.744280 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 05:52:22.744284 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 05:52:22.747549 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.455280 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.455619 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.455786 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.455948 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.456350 4742 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:54Z","lastTransitionTime":"2025-12-05T05:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.469690 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.486029 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1caeab4c-85a7-4204-adcc-d731552fd891\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5164328ddd945d29cc64883f2cc0fb06d85c11eb9f87ac22ff0ce66090fa0588\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://97bb6a7ab3626f857c3a8576f8fcb1d89f5d2a9db2429407cb20c4cd17de8601\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa8d6b533ac5e64e44e684d52142d0724a95b851919b10d07b0c8c547fbab342\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.507510 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.521672 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3cd64cf9696f71b254c4afc76ad9cb805800c3fcaba7c844c366840b3326b2b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://edab1b03a48424e17428c4c9d8a7aa7d76281582c2fadba08f5a30cce9a4d283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.534222 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d81076f2cc2223a6ea9ee917c3436ae30fa0948e8f1ce61abcec519c316d6a73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.545361 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc0b032-e995-4d0f-b5e7-600b880849f5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818d25bc6d9bd936bebaf192ffe1a9fec7d39140a4a936c4f4cad3b4c72ead1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rsm25\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-7q8lw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.557296 4742 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6fb3a766-b56c-4b6c-a09e-3666df4accb8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f54b5f75e10a5fd9f43eec7433614b23bef72beb32ff9028155852c09d9b2e7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c03a8d713a4b0a06c47eb28b20328d66f0f3475b56a16ded6f429dd6648e13a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://13e953bfbd6033682f3959815eedf4f814d275be3391564618723c3491faee3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d24831966361aea37178bde47fb600caca13eccaf510a19017e7404cb801114\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.559082 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.559116 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.559124 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.559136 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.559146 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:54Z","lastTransitionTime":"2025-12-05T05:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.571005 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.586112 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1d4a1f54004c9f2d503203f72071b6672f4494e462ded3ed538c8437cea35afa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.605091 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"130d4974-9fb6-4cdb-b115-56d2a96b1438\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://581c48baad5c52f537dbeef4685c29e54b3748256140bac370782fe889a4330d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://83df213f45da69151228f0c751d4b3a8e174da6716da6a397d0e2204d61c214c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37296fe7781c7619c1816f516fa1727d1bfb4fb76426753c9cf5b07a78d02aa8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4263c5e474d8cef9c2da2176cc97c4e103c7c50cce6122439819773fcae71c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3f8ecbd39c1c164104a565d1f00e71488dfff3dd9c0828c3d3ea27a0de16993d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5a8610d723352ffaae41a2a6bcf7b50c874fdd43406328f52e810d1da0ef8cf7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e9ffbc2d2bf40fe9da934219eadb710ac8bfae746f190bde08ab27a42edc6fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vvh2f\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2gbwd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.631441 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:19Z\\\",\\\"message\\\":\\\"1.EgressIP event handler 8\\\\nI1205 05:53:19.555770 6722 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 05:53:19.555814 6722 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 05:53:19.555883 6722 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 05:53:19.555890 6722 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 05:53:19.555896 6722 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 05:53:19.555919 6722 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 05:53:19.555919 6722 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 05:53:19.555929 6722 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 05:53:19.555946 6722 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 05:53:19.555940 6722 factory.go:656] Stopping watch factory\\\\nI1205 05:53:19.555956 6722 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 05:53:19.556100 6722 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1205 05:53:19.556186 6722 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1205 05:53:19.556222 6722 ovnkube.go:599] Stopped ovnkube\\\\nI1205 05:53:19.556245 6722 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF1205 05:53:19.556306 6722 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:53:18Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-m9jc4_openshift-ovn-kubernetes(06ddc689-50f2-409f-9ac8-8f6a1bed0831)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b82wp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-m9jc4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.643509 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-wh7m2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56d21615-e900-43cf-9aa3-753144dbf53f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0dac26bd401b28bc4423be9c35296a6c2950f69629970103cfe596c1d2ed376c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dn57b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-wh7m2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.660124 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-776bt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39641a18-5d13-441f-9956-3777b9f27703\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:53:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T05:53:10Z\\\",\\\"message\\\":\\\"2025-12-05T05:52:24+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5\\\\n2025-12-05T05:52:24+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_856c13c0-c93e-4630-87d1-ff3d8f16bab5 to /host/opt/cni/bin/\\\\n2025-12-05T05:52:25Z [verbose] multus-daemon started\\\\n2025-12-05T05:52:25Z [verbose] Readiness Indicator file check\\\\n2025-12-05T05:53:10Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T05:52:24Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:53:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gcqvd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-776bt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.661679 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.661726 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.661739 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.661755 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.661767 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:54Z","lastTransitionTime":"2025-12-05T05:53:54Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.676421 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ttdt8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e578c028-99f7-4a07-91cb-58ff75f25dcd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06c36a1d0702b3a5eb1b21a10aea79d92dae2e40dc8f3b79dd91794358fddee6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rglxc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ttdt8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.691563 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"056de541-2d3a-4782-a2cc-0c96c465ca6f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff553f2d53d8dbc9847c9055d7f9bf398e1ce8a955100a41e958800ad730e4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbe2749cda239484a91be5aeb17a4d22cf1e190e814880af130c97a903ee8d33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T05:52:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2nh29\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-hv29p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 
05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.704737 4742 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b69352e1-2d48-4211-83e1-25d09fff9d3c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T05:52:37Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vv5v2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T05:52:37Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pbtb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T05:53:54Z is after 2025-08-24T17:21:41Z" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.764990 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.765126 4742 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.765152 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.765182 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.765204 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:54Z","lastTransitionTime":"2025-12-05T05:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.868347 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.868392 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.868401 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.868414 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.868422 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:54Z","lastTransitionTime":"2025-12-05T05:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.970432 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.970795 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.970934 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.971120 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:54 crc kubenswrapper[4742]: I1205 05:53:54.971267 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:54Z","lastTransitionTime":"2025-12-05T05:53:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.074831 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.074906 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.074927 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.074953 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.074972 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:55Z","lastTransitionTime":"2025-12-05T05:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.177428 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.177465 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.177476 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.177491 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.177502 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:55Z","lastTransitionTime":"2025-12-05T05:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.280566 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.280752 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.280836 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.280873 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.280897 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:55Z","lastTransitionTime":"2025-12-05T05:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.381925 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.381925 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.382135 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:55 crc kubenswrapper[4742]: E1205 05:53:55.382137 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.382366 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:55 crc kubenswrapper[4742]: E1205 05:53:55.382353 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:55 crc kubenswrapper[4742]: E1205 05:53:55.382556 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:55 crc kubenswrapper[4742]: E1205 05:53:55.382698 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.385156 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.385208 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.385226 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.385250 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.385270 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:55Z","lastTransitionTime":"2025-12-05T05:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.488430 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.488489 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.488507 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.488533 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.488558 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:55Z","lastTransitionTime":"2025-12-05T05:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.592034 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.592165 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.592195 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.592228 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.592249 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:55Z","lastTransitionTime":"2025-12-05T05:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.695469 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.695527 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.695544 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.695567 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.695584 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:55Z","lastTransitionTime":"2025-12-05T05:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.799158 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.799218 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.799235 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.799258 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.799277 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:55Z","lastTransitionTime":"2025-12-05T05:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.902530 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.902623 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.902652 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.902676 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:55 crc kubenswrapper[4742]: I1205 05:53:55.902692 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:55Z","lastTransitionTime":"2025-12-05T05:53:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.005306 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.005373 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.005391 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.005415 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.005433 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:56Z","lastTransitionTime":"2025-12-05T05:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.108452 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.108515 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.108532 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.108558 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.108581 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:56Z","lastTransitionTime":"2025-12-05T05:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.212090 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.212155 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.212174 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.212199 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.212219 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:56Z","lastTransitionTime":"2025-12-05T05:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.315427 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.315505 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.315547 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.315580 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.315603 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:56Z","lastTransitionTime":"2025-12-05T05:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.418596 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.418657 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.418681 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.418704 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.418719 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:56Z","lastTransitionTime":"2025-12-05T05:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.521944 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.522010 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.522028 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.522098 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:56 crc kubenswrapper[4742]: I1205 05:53:56.522115 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:56Z","lastTransitionTime":"2025-12-05T05:53:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.244494 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.244549 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.244563 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.244582 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.244597 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:57Z","lastTransitionTime":"2025-12-05T05:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.300045 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-776bt_39641a18-5d13-441f-9956-3777b9f27703/kube-multus/1.log" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.300634 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-776bt_39641a18-5d13-441f-9956-3777b9f27703/kube-multus/0.log" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.300706 4742 generic.go:334] "Generic (PLEG): container finished" podID="39641a18-5d13-441f-9956-3777b9f27703" containerID="c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422" exitCode=1 Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.300754 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-776bt" event={"ID":"39641a18-5d13-441f-9956-3777b9f27703","Type":"ContainerDied","Data":"c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422"} Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.300803 4742 scope.go:117] "RemoveContainer" containerID="0a819435ccef87781b4fd048d1f1334075cf310c69926ed537e43ab77ed835f8" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.301394 4742 scope.go:117] "RemoveContainer" containerID="c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422" Dec 05 05:53:57 crc kubenswrapper[4742]: E1205 05:53:57.301771 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-776bt_openshift-multus(39641a18-5d13-441f-9956-3777b9f27703)\"" pod="openshift-multus/multus-776bt" podUID="39641a18-5d13-441f-9956-3777b9f27703" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.326692 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=49.326662257 podStartE2EDuration="49.326662257s" podCreationTimestamp="2025-12-05 05:53:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:53:57.326312778 +0000 UTC m=+113.238447910" watchObservedRunningTime="2025-12-05 05:53:57.326662257 +0000 UTC m=+113.238797369" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.346779 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.346848 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.346867 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.346894 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.346913 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:57Z","lastTransitionTime":"2025-12-05T05:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.361707 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=92.361658775 podStartE2EDuration="1m32.361658775s" podCreationTimestamp="2025-12-05 05:52:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:53:57.360216766 +0000 UTC m=+113.272351918" watchObservedRunningTime="2025-12-05 05:53:57.361658775 +0000 UTC m=+113.273793877" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.381957 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.382008 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.382025 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:57 crc kubenswrapper[4742]: E1205 05:53:57.382210 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.382279 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:57 crc kubenswrapper[4742]: E1205 05:53:57.382439 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.382462 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=94.382443317 podStartE2EDuration="1m34.382443317s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:53:57.382347885 +0000 UTC m=+113.294482957" watchObservedRunningTime="2025-12-05 05:53:57.382443317 +0000 UTC m=+113.294578419" Dec 05 05:53:57 crc kubenswrapper[4742]: E1205 05:53:57.382540 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:57 crc kubenswrapper[4742]: E1205 05:53:57.382647 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.426436 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=94.426406097 podStartE2EDuration="1m34.426406097s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:53:57.426326215 +0000 UTC m=+113.338461327" watchObservedRunningTime="2025-12-05 05:53:57.426406097 +0000 UTC m=+113.338541199" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.450068 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.450108 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.450119 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.450136 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.450148 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:57Z","lastTransitionTime":"2025-12-05T05:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.480792 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podStartSLOduration=94.480765509 podStartE2EDuration="1m34.480765509s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:53:57.479720721 +0000 UTC m=+113.391855793" watchObservedRunningTime="2025-12-05 05:53:57.480765509 +0000 UTC m=+113.392900601" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.518473 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=62.518451529000004 podStartE2EDuration="1m2.518451529s" podCreationTimestamp="2025-12-05 05:52:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:53:57.500503593 +0000 UTC m=+113.412638695" watchObservedRunningTime="2025-12-05 05:53:57.518451529 +0000 UTC m=+113.430586631" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.552648 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.552714 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.552726 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.552742 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.552773 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:57Z","lastTransitionTime":"2025-12-05T05:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.556638 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-2gbwd" podStartSLOduration=94.556618293 podStartE2EDuration="1m34.556618293s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:53:57.556552841 +0000 UTC m=+113.468687923" watchObservedRunningTime="2025-12-05 05:53:57.556618293 +0000 UTC m=+113.468753355" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.617366 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-wh7m2" podStartSLOduration=94.617344007 podStartE2EDuration="1m34.617344007s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:53:57.604383786 +0000 UTC m=+113.516518848" watchObservedRunningTime="2025-12-05 05:53:57.617344007 +0000 UTC m=+113.529479069" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.628769 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-ttdt8" podStartSLOduration=94.628750235 podStartE2EDuration="1m34.628750235s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:53:57.628045586 +0000 UTC m=+113.540180698" watchObservedRunningTime="2025-12-05 05:53:57.628750235 +0000 UTC m=+113.540885307" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.648772 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-hv29p" podStartSLOduration=93.648739856 podStartE2EDuration="1m33.648739856s" podCreationTimestamp="2025-12-05 05:52:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:53:57.646185457 +0000 UTC m=+113.558320539" watchObservedRunningTime="2025-12-05 05:53:57.648739856 +0000 UTC m=+113.560874958" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.656022 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.656142 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.656169 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.656200 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.656223 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:57Z","lastTransitionTime":"2025-12-05T05:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.759600 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.759676 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.759694 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.759716 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:57 crc kubenswrapper[4742]: I1205 05:53:57.759732 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:57Z","lastTransitionTime":"2025-12-05T05:53:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.273969 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.274045 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.274104 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.274130 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.274153 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:58Z","lastTransitionTime":"2025-12-05T05:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.307426 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-776bt_39641a18-5d13-441f-9956-3777b9f27703/kube-multus/1.log" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.376555 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.376641 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.376661 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.376683 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.376701 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:58Z","lastTransitionTime":"2025-12-05T05:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.395811 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.395863 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.395880 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.395902 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.395919 4742 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T05:53:58Z","lastTransitionTime":"2025-12-05T05:53:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.459215 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2"] Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.459777 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.462050 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.463163 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.463169 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.465298 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.492317 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4af61d6-3151-44e6-8d85-35a97cefc715-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.492413 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e4af61d6-3151-44e6-8d85-35a97cefc715-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.492475 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e4af61d6-3151-44e6-8d85-35a97cefc715-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.492528 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e4af61d6-3151-44e6-8d85-35a97cefc715-service-ca\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.492573 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e4af61d6-3151-44e6-8d85-35a97cefc715-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.593428 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4af61d6-3151-44e6-8d85-35a97cefc715-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc 
kubenswrapper[4742]: I1205 05:53:58.593511 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e4af61d6-3151-44e6-8d85-35a97cefc715-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.593593 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e4af61d6-3151-44e6-8d85-35a97cefc715-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.593663 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e4af61d6-3151-44e6-8d85-35a97cefc715-service-ca\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.593794 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e4af61d6-3151-44e6-8d85-35a97cefc715-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.593798 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e4af61d6-3151-44e6-8d85-35a97cefc715-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.593989 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e4af61d6-3151-44e6-8d85-35a97cefc715-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.595331 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e4af61d6-3151-44e6-8d85-35a97cefc715-service-ca\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.604582 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4af61d6-3151-44e6-8d85-35a97cefc715-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.620650 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e4af61d6-3151-44e6-8d85-35a97cefc715-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-fx8g2\" (UID: \"e4af61d6-3151-44e6-8d85-35a97cefc715\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:58 crc kubenswrapper[4742]: I1205 05:53:58.781689 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" Dec 05 05:53:59 crc kubenswrapper[4742]: I1205 05:53:59.312700 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" event={"ID":"e4af61d6-3151-44e6-8d85-35a97cefc715","Type":"ContainerStarted","Data":"c5a0a25123c5a4454864104157efc24db3469d42437796dceb4206d9e552da53"} Dec 05 05:53:59 crc kubenswrapper[4742]: I1205 05:53:59.312762 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" event={"ID":"e4af61d6-3151-44e6-8d85-35a97cefc715","Type":"ContainerStarted","Data":"a3dbf37ade612d458559cb53feb33e783bf41ef3f4786cfa79592c52e9d3f7b6"} Dec 05 05:53:59 crc kubenswrapper[4742]: I1205 05:53:59.335617 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-fx8g2" podStartSLOduration=96.335591262 podStartE2EDuration="1m36.335591262s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:53:59.333343791 +0000 UTC m=+115.245478863" watchObservedRunningTime="2025-12-05 05:53:59.335591262 +0000 UTC m=+115.247726364" Dec 05 05:53:59 crc kubenswrapper[4742]: I1205 05:53:59.382733 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:53:59 crc kubenswrapper[4742]: I1205 05:53:59.382842 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:53:59 crc kubenswrapper[4742]: I1205 05:53:59.382751 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:53:59 crc kubenswrapper[4742]: E1205 05:53:59.383016 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:53:59 crc kubenswrapper[4742]: E1205 05:53:59.382924 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:53:59 crc kubenswrapper[4742]: I1205 05:53:59.382840 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:53:59 crc kubenswrapper[4742]: E1205 05:53:59.383249 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:53:59 crc kubenswrapper[4742]: E1205 05:53:59.383328 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:54:01 crc kubenswrapper[4742]: I1205 05:54:01.381919 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:54:01 crc kubenswrapper[4742]: I1205 05:54:01.381951 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:54:01 crc kubenswrapper[4742]: I1205 05:54:01.382018 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:54:01 crc kubenswrapper[4742]: E1205 05:54:01.382023 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:54:01 crc kubenswrapper[4742]: I1205 05:54:01.382089 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:54:01 crc kubenswrapper[4742]: E1205 05:54:01.382281 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:54:01 crc kubenswrapper[4742]: E1205 05:54:01.382634 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:54:01 crc kubenswrapper[4742]: E1205 05:54:01.382778 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:54:01 crc kubenswrapper[4742]: I1205 05:54:01.382820 4742 scope.go:117] "RemoveContainer" containerID="d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251" Dec 05 05:54:02 crc kubenswrapper[4742]: I1205 05:54:02.325229 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/3.log" Dec 05 05:54:02 crc kubenswrapper[4742]: I1205 05:54:02.328876 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerStarted","Data":"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81"} Dec 05 05:54:02 crc kubenswrapper[4742]: I1205 05:54:02.329357 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:54:02 crc kubenswrapper[4742]: I1205 05:54:02.364496 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podStartSLOduration=99.364472058 podStartE2EDuration="1m39.364472058s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:02.363270735 +0000 UTC m=+118.275405837" watchObservedRunningTime="2025-12-05 05:54:02.364472058 +0000 UTC m=+118.276607160" Dec 05 05:54:02 crc kubenswrapper[4742]: I1205 05:54:02.387675 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-pbtb4"] Dec 05 05:54:02 crc kubenswrapper[4742]: I1205 05:54:02.387867 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:54:02 crc kubenswrapper[4742]: E1205 05:54:02.387997 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:54:03 crc kubenswrapper[4742]: I1205 05:54:03.381836 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:54:03 crc kubenswrapper[4742]: I1205 05:54:03.381844 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:54:03 crc kubenswrapper[4742]: E1205 05:54:03.382123 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:54:03 crc kubenswrapper[4742]: I1205 05:54:03.381849 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:54:03 crc kubenswrapper[4742]: E1205 05:54:03.382297 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:54:03 crc kubenswrapper[4742]: E1205 05:54:03.382386 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:54:04 crc kubenswrapper[4742]: E1205 05:54:04.364259 4742 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 05 05:54:04 crc kubenswrapper[4742]: I1205 05:54:04.382304 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:54:04 crc kubenswrapper[4742]: E1205 05:54:04.383080 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:54:04 crc kubenswrapper[4742]: E1205 05:54:04.482601 4742 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 05:54:05 crc kubenswrapper[4742]: I1205 05:54:05.381942 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:54:05 crc kubenswrapper[4742]: I1205 05:54:05.381942 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:54:05 crc kubenswrapper[4742]: E1205 05:54:05.382180 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:54:05 crc kubenswrapper[4742]: E1205 05:54:05.382290 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:54:05 crc kubenswrapper[4742]: I1205 05:54:05.383105 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:54:05 crc kubenswrapper[4742]: E1205 05:54:05.383254 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:54:06 crc kubenswrapper[4742]: I1205 05:54:06.381982 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:54:06 crc kubenswrapper[4742]: E1205 05:54:06.382592 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:54:07 crc kubenswrapper[4742]: I1205 05:54:07.382586 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:54:07 crc kubenswrapper[4742]: I1205 05:54:07.382783 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:54:07 crc kubenswrapper[4742]: I1205 05:54:07.382879 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:54:07 crc kubenswrapper[4742]: E1205 05:54:07.383006 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:54:07 crc kubenswrapper[4742]: E1205 05:54:07.383185 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:54:07 crc kubenswrapper[4742]: E1205 05:54:07.383538 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:54:08 crc kubenswrapper[4742]: I1205 05:54:08.382201 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:54:08 crc kubenswrapper[4742]: E1205 05:54:08.382395 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:54:09 crc kubenswrapper[4742]: I1205 05:54:09.382320 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:54:09 crc kubenswrapper[4742]: I1205 05:54:09.382339 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:54:09 crc kubenswrapper[4742]: E1205 05:54:09.382535 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:54:09 crc kubenswrapper[4742]: I1205 05:54:09.382605 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:54:09 crc kubenswrapper[4742]: E1205 05:54:09.382692 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:54:09 crc kubenswrapper[4742]: E1205 05:54:09.382889 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:54:09 crc kubenswrapper[4742]: E1205 05:54:09.484016 4742 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 05:54:10 crc kubenswrapper[4742]: I1205 05:54:10.382102 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:54:10 crc kubenswrapper[4742]: E1205 05:54:10.382240 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:54:11 crc kubenswrapper[4742]: I1205 05:54:11.382468 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:54:11 crc kubenswrapper[4742]: I1205 05:54:11.382635 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:54:11 crc kubenswrapper[4742]: I1205 05:54:11.382499 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:54:11 crc kubenswrapper[4742]: E1205 05:54:11.382689 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:54:11 crc kubenswrapper[4742]: E1205 05:54:11.382861 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:54:11 crc kubenswrapper[4742]: E1205 05:54:11.382904 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:54:12 crc kubenswrapper[4742]: I1205 05:54:12.381814 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:54:12 crc kubenswrapper[4742]: E1205 05:54:12.381993 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:54:12 crc kubenswrapper[4742]: I1205 05:54:12.382532 4742 scope.go:117] "RemoveContainer" containerID="c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422" Dec 05 05:54:13 crc kubenswrapper[4742]: I1205 05:54:13.371369 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-776bt_39641a18-5d13-441f-9956-3777b9f27703/kube-multus/1.log" Dec 05 05:54:13 crc kubenswrapper[4742]: I1205 05:54:13.371451 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-776bt" event={"ID":"39641a18-5d13-441f-9956-3777b9f27703","Type":"ContainerStarted","Data":"27d377e2c76cba7de8c7c932e8375753cd96d8de8cb46c0a87705032bf8934b1"} Dec 05 05:54:13 crc kubenswrapper[4742]: I1205 05:54:13.382188 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:54:13 crc kubenswrapper[4742]: I1205 05:54:13.382193 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:54:13 crc kubenswrapper[4742]: I1205 05:54:13.382798 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:54:13 crc kubenswrapper[4742]: E1205 05:54:13.383569 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 05:54:13 crc kubenswrapper[4742]: E1205 05:54:13.383827 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 05:54:13 crc kubenswrapper[4742]: E1205 05:54:13.384130 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 05:54:13 crc kubenswrapper[4742]: I1205 05:54:13.406176 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-776bt" podStartSLOduration=110.406148834 podStartE2EDuration="1m50.406148834s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:13.405508397 +0000 UTC m=+129.317643529" watchObservedRunningTime="2025-12-05 05:54:13.406148834 +0000 UTC m=+129.318283936" Dec 05 05:54:14 crc kubenswrapper[4742]: I1205 05:54:14.382015 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:54:14 crc kubenswrapper[4742]: E1205 05:54:14.384633 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pbtb4" podUID="b69352e1-2d48-4211-83e1-25d09fff9d3c" Dec 05 05:54:15 crc kubenswrapper[4742]: I1205 05:54:15.381779 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:54:15 crc kubenswrapper[4742]: I1205 05:54:15.381910 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:54:15 crc kubenswrapper[4742]: I1205 05:54:15.381779 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:54:15 crc kubenswrapper[4742]: I1205 05:54:15.385868 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 05:54:15 crc kubenswrapper[4742]: I1205 05:54:15.385925 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 05:54:15 crc kubenswrapper[4742]: I1205 05:54:15.385986 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 05:54:15 crc kubenswrapper[4742]: I1205 05:54:15.386261 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 05:54:16 crc kubenswrapper[4742]: I1205 05:54:16.381902 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4" Dec 05 05:54:16 crc kubenswrapper[4742]: I1205 05:54:16.385014 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 05:54:16 crc kubenswrapper[4742]: I1205 05:54:16.385392 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.175789 4742 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.233761 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7mhpl"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.234501 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.238113 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zk4vj"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.239264 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.239965 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.240873 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.247359 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.248042 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.249656 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.250032 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.253414 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-45686"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.253912 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.254415 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.254417 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.255049 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-79dwh"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.255856 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.257912 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-778cz"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.258413 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.258781 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-d6dsw"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.259369 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-d6dsw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.260567 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-gn9cx"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.260946 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-gn9cx" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.261619 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.261935 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.261995 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.262168 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.262352 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.262491 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.262789 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.262886 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.262964 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.263075 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.263154 4742 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.266765 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.267113 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.267324 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.267534 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.268117 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.268711 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.269391 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.269798 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.270380 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.270779 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.274282 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.275361 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.275734 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.276788 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.262391 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.262440 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.291522 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.291830 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.292011 4742 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.292343 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.293218 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.293382 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.293516 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.295530 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.295868 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.296017 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.296125 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.298748 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.298758 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.299036 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-flkxk"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.299476 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.299721 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.299821 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.300004 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.300177 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.300289 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.300405 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.300579 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.300666 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.300747 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.300816 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.300898 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.300969 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.302281 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.299829 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.302430 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.302555 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.302582 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.300220 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.302674 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.302680 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.302714 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.302761 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.302840 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.302849 4742 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.302860 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.302939 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.302985 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.302991 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-nfv6w"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.303085 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.303162 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.303252 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.303313 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.303358 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.303471 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.303603 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.304073 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.304128 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.304380 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.310760 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.311822 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.313457 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.314253 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vfstf"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.314775 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.314785 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.335812 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.336119 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.336466 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.336834 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.339860 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.340117 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.344658 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.344769 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.345510 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr779\" (UniqueName: 
\"kubernetes.io/projected/4b516291-ecb5-48f1-8279-1448a2ad8f03-kube-api-access-vr779\") pod \"machine-approver-56656f9798-pxlnb\" (UID: \"4b516291-ecb5-48f1-8279-1448a2ad8f03\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.345570 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkxbz\" (UniqueName: \"kubernetes.io/projected/bfef6735-7572-4411-b37d-b194d84534de-kube-api-access-vkxbz\") pod \"downloads-7954f5f757-gn9cx\" (UID: \"bfef6735-7572-4411-b37d-b194d84534de\") " pod="openshift-console/downloads-7954f5f757-gn9cx" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.357249 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.357421 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.357490 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.345612 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/defcf8e8-7650-448f-9950-3434978ee21d-encryption-config\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.357656 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.357740 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.358030 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.358394 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.358405 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.358411 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.358607 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.360376 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.360637 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.360765 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 05:54:19 crc kubenswrapper[4742]: 
I1205 05:54:19.360943 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-2cttt"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361177 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51f64621-97df-411c-bb21-c24a7c2976be-serving-cert\") pod \"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361270 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361345 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-audit-policies\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361386 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361402 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqn87\" (UniqueName: \"kubernetes.io/projected/51f64621-97df-411c-bb21-c24a7c2976be-kube-api-access-vqn87\") pod \"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361421 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d2e8268-b920-41b6-a22d-50dca94f8a10-config\") pod \"openshift-apiserver-operator-796bbdcf4f-rbzzz\" (UID: \"3d2e8268-b920-41b6-a22d-50dca94f8a10\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361456 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/defcf8e8-7650-448f-9950-3434978ee21d-audit-dir\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361476 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51f64621-97df-411c-bb21-c24a7c2976be-service-ca-bundle\") pod 
\"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361496 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-encryption-config\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361544 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df7f2c2a-0105-489e-9087-3fbf406856a9-trusted-ca\") pod \"console-operator-58897d9998-d6dsw\" (UID: \"df7f2c2a-0105-489e-9087-3fbf406856a9\") " pod="openshift-console-operator/console-operator-58897d9998-d6dsw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361573 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361592 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl2cj\" (UniqueName: \"kubernetes.io/projected/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-kube-api-access-hl2cj\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361641 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/defcf8e8-7650-448f-9950-3434978ee21d-serving-cert\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361677 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361732 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/4b516291-ecb5-48f1-8279-1448a2ad8f03-machine-approver-tls\") pod \"machine-approver-56656f9798-pxlnb\" (UID: \"4b516291-ecb5-48f1-8279-1448a2ad8f03\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361756 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/91c75381-2f50-415e-b5c8-e1261be30bbc-console-oauth-config\") pod \"console-f9d7485db-778cz\" (UID: 
\"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361804 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-image-import-ca\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361822 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-audit-dir\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361839 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4b516291-ecb5-48f1-8279-1448a2ad8f03-auth-proxy-config\") pod \"machine-approver-56656f9798-pxlnb\" (UID: \"4b516291-ecb5-48f1-8279-1448a2ad8f03\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361892 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/518731b7-0f61-40b4-ad6c-c49383c0dd5b-config\") pod \"machine-api-operator-5694c8668f-zk4vj\" (UID: \"518731b7-0f61-40b4-ad6c-c49383c0dd5b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361912 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-oauth-serving-cert\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361940 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361952 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361971 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51f64621-97df-411c-bb21-c24a7c2976be-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361986 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-audit\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362026 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txc45\" (UniqueName: \"kubernetes.io/projected/defcf8e8-7650-448f-9950-3434978ee21d-kube-api-access-txc45\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362048 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362088 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c7188d0-4020-4749-8bd6-98b637ce3f3c-client-ca\") pod \"route-controller-manager-6576b87f9c-2279c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362108 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df7f2c2a-0105-489e-9087-3fbf406856a9-config\") pod \"console-operator-58897d9998-d6dsw\" (UID: \"df7f2c2a-0105-489e-9087-3fbf406856a9\") " pod="openshift-console-operator/console-operator-58897d9998-d6dsw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362123 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/518731b7-0f61-40b4-ad6c-c49383c0dd5b-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zk4vj\" (UID: 
\"518731b7-0f61-40b4-ad6c-c49383c0dd5b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362177 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66zhn\" (UniqueName: \"kubernetes.io/projected/3d2e8268-b920-41b6-a22d-50dca94f8a10-kube-api-access-66zhn\") pod \"openshift-apiserver-operator-796bbdcf4f-rbzzz\" (UID: \"3d2e8268-b920-41b6-a22d-50dca94f8a10\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362195 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-service-ca\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362211 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362249 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-config\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362267 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b516291-ecb5-48f1-8279-1448a2ad8f03-config\") pod \"machine-approver-56656f9798-pxlnb\" (UID: \"4b516291-ecb5-48f1-8279-1448a2ad8f03\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362281 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6smz\" (UniqueName: \"kubernetes.io/projected/91c75381-2f50-415e-b5c8-e1261be30bbc-kube-api-access-z6smz\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362321 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/defcf8e8-7650-448f-9950-3434978ee21d-etcd-client\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362339 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/defcf8e8-7650-448f-9950-3434978ee21d-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc 
kubenswrapper[4742]: I1205 05:54:19.362357 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-serving-cert\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362405 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d2e8268-b920-41b6-a22d-50dca94f8a10-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-rbzzz\" (UID: \"3d2e8268-b920-41b6-a22d-50dca94f8a10\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362426 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/defcf8e8-7650-448f-9950-3434978ee21d-audit-policies\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362441 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df7f2c2a-0105-489e-9087-3fbf406856a9-serving-cert\") pod \"console-operator-58897d9998-d6dsw\" (UID: \"df7f2c2a-0105-489e-9087-3fbf406856a9\") " pod="openshift-console-operator/console-operator-58897d9998-d6dsw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362481 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362496 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.361864 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-xqfl8"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.362499 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc5fq\" (UniqueName: \"kubernetes.io/projected/f3d9ea9f-6af6-42ea-9298-2e970da2572e-kube-api-access-lc5fq\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363107 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363138 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c7188d0-4020-4749-8bd6-98b637ce3f3c-serving-cert\") pod \"route-controller-manager-6576b87f9c-2279c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363161 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-node-pullsecrets\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363181 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-etcd-serving-ca\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363212 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-trusted-ca-bundle\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363228 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363247 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-console-config\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363267 4742 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fx4jb\" (UniqueName: \"kubernetes.io/projected/0c7188d0-4020-4749-8bd6-98b637ce3f3c-kube-api-access-fx4jb\") pod \"route-controller-manager-6576b87f9c-2279c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363292 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/518731b7-0f61-40b4-ad6c-c49383c0dd5b-images\") pod \"machine-api-operator-5694c8668f-zk4vj\" (UID: \"518731b7-0f61-40b4-ad6c-c49383c0dd5b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363310 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363348 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-795pw\" (UniqueName: \"kubernetes.io/projected/df7f2c2a-0105-489e-9087-3fbf406856a9-kube-api-access-795pw\") pod \"console-operator-58897d9998-d6dsw\" (UID: \"df7f2c2a-0105-489e-9087-3fbf406856a9\") " pod="openshift-console-operator/console-operator-58897d9998-d6dsw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363372 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/defcf8e8-7650-448f-9950-3434978ee21d-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363389 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j8hc\" (UniqueName: \"kubernetes.io/projected/518731b7-0f61-40b4-ad6c-c49383c0dd5b-kube-api-access-8j8hc\") pod \"machine-api-operator-5694c8668f-zk4vj\" (UID: \"518731b7-0f61-40b4-ad6c-c49383c0dd5b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363408 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/91c75381-2f50-415e-b5c8-e1261be30bbc-console-serving-cert\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363421 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363496 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363719 4742 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363836 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363427 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363877 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363899 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f3d9ea9f-6af6-42ea-9298-2e970da2572e-audit-dir\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363917 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c7188d0-4020-4749-8bd6-98b637ce3f3c-config\") pod \"route-controller-manager-6576b87f9c-2279c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363934 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51f64621-97df-411c-bb21-c24a7c2976be-config\") pod \"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.363953 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-etcd-client\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.364154 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-xqfl8" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.364424 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.364617 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.365224 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.365249 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rsr6b"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.365255 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.365383 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.366173 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-mp2sf"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.366475 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rsr6b" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.366504 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.366761 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.367400 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.369095 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.369530 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-7j9tp"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.369755 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.370842 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7j9tp" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.371005 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.371299 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.371830 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.371983 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.373355 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.373783 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.374355 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.374576 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.374803 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zk4vj"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.378185 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7mhpl"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.378220 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.378609 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-7jwfc"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.378946 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-7jwfc" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.378994 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.379440 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fpfzc"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.380693 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.381018 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.381306 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-fpfzc" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.382112 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.382232 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.383599 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.384001 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.387973 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.388990 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.389792 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dfmr9"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.390736 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.391127 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.396243 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.397295 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.400508 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-qhd4g"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.400702 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.401556 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-qhd4g" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.407668 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.410195 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-d6dsw"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.412022 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.413265 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-79dwh"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.414307 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.416274 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-45686"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.417075 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.417568 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.418512 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-2cttt"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.419986 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-flkxk"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.421462 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.423182 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-7j9tp"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.425035 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.425369 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.427213 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rsr6b"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.428026 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.429363 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vfstf"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.430079 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-778cz"] Dec 05 
05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.431295 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-xqfl8"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.432260 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.433580 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.434243 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-gn9cx"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.435555 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.436394 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.437327 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-mp2sf"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.437946 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.439252 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.441249 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.445150 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.446865 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-7jwfc"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.450988 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.452337 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-ngjs9"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.452883 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-ngjs9" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.456424 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.456900 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-qhd4g"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.458191 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fpfzc"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.459165 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.460925 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.462815 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dfmr9"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.463987 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.464499 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51f64621-97df-411c-bb21-c24a7c2976be-config\") pod \"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.464537 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-etcd-client\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.464563 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f3d9ea9f-6af6-42ea-9298-2e970da2572e-audit-dir\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.464587 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c7188d0-4020-4749-8bd6-98b637ce3f3c-config\") pod \"route-controller-manager-6576b87f9c-2279c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.464611 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr779\" (UniqueName: \"kubernetes.io/projected/4b516291-ecb5-48f1-8279-1448a2ad8f03-kube-api-access-vr779\") pod \"machine-approver-56656f9798-pxlnb\" (UID: \"4b516291-ecb5-48f1-8279-1448a2ad8f03\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.464635 4742 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkxbz\" (UniqueName: \"kubernetes.io/projected/bfef6735-7572-4411-b37d-b194d84534de-kube-api-access-vkxbz\") pod \"downloads-7954f5f757-gn9cx\" (UID: \"bfef6735-7572-4411-b37d-b194d84534de\") " pod="openshift-console/downloads-7954f5f757-gn9cx" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.464656 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/defcf8e8-7650-448f-9950-3434978ee21d-encryption-config\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.464733 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-rd6kc"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465459 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-rd6kc" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465582 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-p8pxx"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.464744 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51f64621-97df-411c-bb21-c24a7c2976be-serving-cert\") pod \"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465718 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465761 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-audit-policies\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465781 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465799 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqn87\" (UniqueName: \"kubernetes.io/projected/51f64621-97df-411c-bb21-c24a7c2976be-kube-api-access-vqn87\") pod \"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465814 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-encryption-config\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465838 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d2e8268-b920-41b6-a22d-50dca94f8a10-config\") pod \"openshift-apiserver-operator-796bbdcf4f-rbzzz\" (UID: \"3d2e8268-b920-41b6-a22d-50dca94f8a10\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465855 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/defcf8e8-7650-448f-9950-3434978ee21d-audit-dir\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465870 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51f64621-97df-411c-bb21-c24a7c2976be-service-ca-bundle\") pod \"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465888 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df7f2c2a-0105-489e-9087-3fbf406856a9-trusted-ca\") pod \"console-operator-58897d9998-d6dsw\" (UID: \"df7f2c2a-0105-489e-9087-3fbf406856a9\") " pod="openshift-console-operator/console-operator-58897d9998-d6dsw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465904 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465920 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl2cj\" (UniqueName: \"kubernetes.io/projected/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-kube-api-access-hl2cj\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465936 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/defcf8e8-7650-448f-9950-3434978ee21d-serving-cert\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465954 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465974 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a302e9ed-44a5-41e8-8e91-c37771dca329-serving-cert\") pod \"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.465989 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcwxk\" (UniqueName: \"kubernetes.io/projected/a302e9ed-44a5-41e8-8e91-c37771dca329-kube-api-access-wcwxk\") pod \"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466010 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/4b516291-ecb5-48f1-8279-1448a2ad8f03-machine-approver-tls\") pod \"machine-approver-56656f9798-pxlnb\" (UID: \"4b516291-ecb5-48f1-8279-1448a2ad8f03\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466029 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/91c75381-2f50-415e-b5c8-e1261be30bbc-console-oauth-config\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466048 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-image-import-ca\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466079 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-audit-dir\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466096 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-client-ca\") pod \"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466113 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51f64621-97df-411c-bb21-c24a7c2976be-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466129 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-audit\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466150 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4b516291-ecb5-48f1-8279-1448a2ad8f03-auth-proxy-config\") pod \"machine-approver-56656f9798-pxlnb\" (UID: \"4b516291-ecb5-48f1-8279-1448a2ad8f03\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466167 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/518731b7-0f61-40b4-ad6c-c49383c0dd5b-config\") pod \"machine-api-operator-5694c8668f-zk4vj\" (UID: \"518731b7-0f61-40b4-ad6c-c49383c0dd5b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466183 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-oauth-serving-cert\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466199 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466217 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txc45\" (UniqueName: \"kubernetes.io/projected/defcf8e8-7650-448f-9950-3434978ee21d-kube-api-access-txc45\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466233 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466250 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c7188d0-4020-4749-8bd6-98b637ce3f3c-client-ca\") pod \"route-controller-manager-6576b87f9c-2279c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466269 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df7f2c2a-0105-489e-9087-3fbf406856a9-config\") pod \"console-operator-58897d9998-d6dsw\" (UID: 
\"df7f2c2a-0105-489e-9087-3fbf406856a9\") " pod="openshift-console-operator/console-operator-58897d9998-d6dsw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466284 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/518731b7-0f61-40b4-ad6c-c49383c0dd5b-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zk4vj\" (UID: \"518731b7-0f61-40b4-ad6c-c49383c0dd5b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466302 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-service-ca\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466330 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66zhn\" (UniqueName: \"kubernetes.io/projected/3d2e8268-b920-41b6-a22d-50dca94f8a10-kube-api-access-66zhn\") pod \"openshift-apiserver-operator-796bbdcf4f-rbzzz\" (UID: \"3d2e8268-b920-41b6-a22d-50dca94f8a10\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466346 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466364 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-config\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466395 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6smz\" (UniqueName: \"kubernetes.io/projected/91c75381-2f50-415e-b5c8-e1261be30bbc-kube-api-access-z6smz\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466413 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b516291-ecb5-48f1-8279-1448a2ad8f03-config\") pod \"machine-approver-56656f9798-pxlnb\" (UID: \"4b516291-ecb5-48f1-8279-1448a2ad8f03\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466431 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/439d1335-3f44-4f26-ad21-b8580866130c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-s8jnz\" (UID: \"439d1335-3f44-4f26-ad21-b8580866130c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 
05:54:19.466449 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/defcf8e8-7650-448f-9950-3434978ee21d-etcd-client\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466465 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/defcf8e8-7650-448f-9950-3434978ee21d-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466479 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-serving-cert\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466498 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-config\") pod \"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466504 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466530 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d2e8268-b920-41b6-a22d-50dca94f8a10-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-rbzzz\" (UID: \"3d2e8268-b920-41b6-a22d-50dca94f8a10\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466548 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/defcf8e8-7650-448f-9950-3434978ee21d-audit-policies\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466563 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df7f2c2a-0105-489e-9087-3fbf406856a9-serving-cert\") pod \"console-operator-58897d9998-d6dsw\" (UID: \"df7f2c2a-0105-489e-9087-3fbf406856a9\") " pod="openshift-console-operator/console-operator-58897d9998-d6dsw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466589 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466606 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-lc5fq\" (UniqueName: \"kubernetes.io/projected/f3d9ea9f-6af6-42ea-9298-2e970da2572e-kube-api-access-lc5fq\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466622 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c7188d0-4020-4749-8bd6-98b637ce3f3c-serving-cert\") pod \"route-controller-manager-6576b87f9c-2279c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466640 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-node-pullsecrets\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466654 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-etcd-serving-ca\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466670 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466693 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-trusted-ca-bundle\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466709 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466725 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-console-config\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466742 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fx4jb\" (UniqueName: \"kubernetes.io/projected/0c7188d0-4020-4749-8bd6-98b637ce3f3c-kube-api-access-fx4jb\") pod \"route-controller-manager-6576b87f9c-2279c\" (UID: 
\"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466764 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/518731b7-0f61-40b4-ad6c-c49383c0dd5b-images\") pod \"machine-api-operator-5694c8668f-zk4vj\" (UID: \"518731b7-0f61-40b4-ad6c-c49383c0dd5b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466781 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466798 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwk7m\" (UniqueName: \"kubernetes.io/projected/439d1335-3f44-4f26-ad21-b8580866130c-kube-api-access-lwk7m\") pod \"kube-storage-version-migrator-operator-b67b599dd-s8jnz\" (UID: \"439d1335-3f44-4f26-ad21-b8580866130c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466817 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-795pw\" (UniqueName: \"kubernetes.io/projected/df7f2c2a-0105-489e-9087-3fbf406856a9-kube-api-access-795pw\") pod \"console-operator-58897d9998-d6dsw\" (UID: \"df7f2c2a-0105-489e-9087-3fbf406856a9\") " pod="openshift-console-operator/console-operator-58897d9998-d6dsw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466834 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/439d1335-3f44-4f26-ad21-b8580866130c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-s8jnz\" (UID: \"439d1335-3f44-4f26-ad21-b8580866130c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466851 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/91c75381-2f50-415e-b5c8-e1261be30bbc-console-serving-cert\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466867 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466886 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/defcf8e8-7650-448f-9950-3434978ee21d-etcd-serving-ca\") pod 
\"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466902 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j8hc\" (UniqueName: \"kubernetes.io/projected/518731b7-0f61-40b4-ad6c-c49383c0dd5b-kube-api-access-8j8hc\") pod \"machine-api-operator-5694c8668f-zk4vj\" (UID: \"518731b7-0f61-40b4-ad6c-c49383c0dd5b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466918 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.467077 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51f64621-97df-411c-bb21-c24a7c2976be-config\") pod \"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.467289 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f3d9ea9f-6af6-42ea-9298-2e970da2572e-audit-dir\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.467366 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/defcf8e8-7650-448f-9950-3434978ee21d-audit-dir\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.467882 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-trusted-ca-bundle\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.467943 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-node-pullsecrets\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.468007 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-service-ca\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.468152 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d2e8268-b920-41b6-a22d-50dca94f8a10-config\") pod 
\"openshift-apiserver-operator-796bbdcf4f-rbzzz\" (UID: \"3d2e8268-b920-41b6-a22d-50dca94f8a10\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.468465 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4b516291-ecb5-48f1-8279-1448a2ad8f03-auth-proxy-config\") pod \"machine-approver-56656f9798-pxlnb\" (UID: \"4b516291-ecb5-48f1-8279-1448a2ad8f03\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.468811 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.468808 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/518731b7-0f61-40b4-ad6c-c49383c0dd5b-config\") pod \"machine-api-operator-5694c8668f-zk4vj\" (UID: \"518731b7-0f61-40b4-ad6c-c49383c0dd5b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.469386 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-audit-policies\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.469433 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/df7f2c2a-0105-489e-9087-3fbf406856a9-trusted-ca\") pod \"console-operator-58897d9998-d6dsw\" (UID: \"df7f2c2a-0105-489e-9087-3fbf406856a9\") " pod="openshift-console-operator/console-operator-58897d9998-d6dsw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.469521 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51f64621-97df-411c-bb21-c24a7c2976be-service-ca-bundle\") pod \"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.469550 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-oauth-serving-cert\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.469856 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-trusted-ca-bundle\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.469991 4742 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.470491 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-rd6kc"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.471017 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-etcd-serving-ca\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.471089 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-audit-dir\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.471490 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-audit\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.471663 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/51f64621-97df-411c-bb21-c24a7c2976be-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.471821 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-p8pxx"] Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.466407 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c7188d0-4020-4749-8bd6-98b637ce3f3c-config\") pod \"route-controller-manager-6576b87f9c-2279c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.472108 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c7188d0-4020-4749-8bd6-98b637ce3f3c-client-ca\") pod \"route-controller-manager-6576b87f9c-2279c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.472384 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b516291-ecb5-48f1-8279-1448a2ad8f03-config\") pod \"machine-approver-56656f9798-pxlnb\" (UID: \"4b516291-ecb5-48f1-8279-1448a2ad8f03\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.472916 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/518731b7-0f61-40b4-ad6c-c49383c0dd5b-images\") pod \"machine-api-operator-5694c8668f-zk4vj\" (UID: \"518731b7-0f61-40b4-ad6c-c49383c0dd5b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.473234 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-console-config\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.473290 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-image-import-ca\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.473348 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-config\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.473929 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/91c75381-2f50-415e-b5c8-e1261be30bbc-console-oauth-config\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.473997 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/defcf8e8-7650-448f-9950-3434978ee21d-audit-policies\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.474481 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/defcf8e8-7650-448f-9950-3434978ee21d-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.474568 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/defcf8e8-7650-448f-9950-3434978ee21d-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.474613 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.474838 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df7f2c2a-0105-489e-9087-3fbf406856a9-config\") pod \"console-operator-58897d9998-d6dsw\" (UID: \"df7f2c2a-0105-489e-9087-3fbf406856a9\") " pod="openshift-console-operator/console-operator-58897d9998-d6dsw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.475592 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/defcf8e8-7650-448f-9950-3434978ee21d-encryption-config\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.475795 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51f64621-97df-411c-bb21-c24a7c2976be-serving-cert\") pod \"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.475817 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.475841 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-encryption-config\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.475879 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-etcd-client\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.475903 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.476743 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/518731b7-0f61-40b4-ad6c-c49383c0dd5b-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-zk4vj\" (UID: \"518731b7-0f61-40b4-ad6c-c49383c0dd5b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.476781 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c7188d0-4020-4749-8bd6-98b637ce3f3c-serving-cert\") pod \"route-controller-manager-6576b87f9c-2279c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.477071 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/91c75381-2f50-415e-b5c8-e1261be30bbc-console-serving-cert\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.477103 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/defcf8e8-7650-448f-9950-3434978ee21d-serving-cert\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.477199 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.477480 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.477523 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-serving-cert\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.477537 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df7f2c2a-0105-489e-9087-3fbf406856a9-serving-cert\") pod \"console-operator-58897d9998-d6dsw\" (UID: \"df7f2c2a-0105-489e-9087-3fbf406856a9\") " pod="openshift-console-operator/console-operator-58897d9998-d6dsw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.477543 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.477779 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.479219 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/4b516291-ecb5-48f1-8279-1448a2ad8f03-machine-approver-tls\") pod \"machine-approver-56656f9798-pxlnb\" (UID: \"4b516291-ecb5-48f1-8279-1448a2ad8f03\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.480335 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3d2e8268-b920-41b6-a22d-50dca94f8a10-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-rbzzz\" (UID: \"3d2e8268-b920-41b6-a22d-50dca94f8a10\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.483584 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.483913 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.484312 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.484639 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/defcf8e8-7650-448f-9950-3434978ee21d-etcd-client\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.496565 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.516863 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.536259 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.557207 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.567460 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/439d1335-3f44-4f26-ad21-b8580866130c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-s8jnz\" (UID: \"439d1335-3f44-4f26-ad21-b8580866130c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.567548 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-config\") pod \"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.567593 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.567645 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwk7m\" (UniqueName: \"kubernetes.io/projected/439d1335-3f44-4f26-ad21-b8580866130c-kube-api-access-lwk7m\") pod \"kube-storage-version-migrator-operator-b67b599dd-s8jnz\" (UID: \"439d1335-3f44-4f26-ad21-b8580866130c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.567690 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/439d1335-3f44-4f26-ad21-b8580866130c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-s8jnz\" (UID: \"439d1335-3f44-4f26-ad21-b8580866130c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.567782 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a302e9ed-44a5-41e8-8e91-c37771dca329-serving-cert\") pod \"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.567813 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcwxk\" (UniqueName: \"kubernetes.io/projected/a302e9ed-44a5-41e8-8e91-c37771dca329-kube-api-access-wcwxk\") pod \"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.567839 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-client-ca\") pod \"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.569000 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-config\") pod \"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.569112 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-client-ca\") pod 
\"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.569228 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.572361 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a302e9ed-44a5-41e8-8e91-c37771dca329-serving-cert\") pod \"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.576834 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.616232 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.636683 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.656759 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.676127 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.697156 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.716829 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.737408 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.741740 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/439d1335-3f44-4f26-ad21-b8580866130c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-s8jnz\" (UID: \"439d1335-3f44-4f26-ad21-b8580866130c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.756721 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.758560 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/439d1335-3f44-4f26-ad21-b8580866130c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-s8jnz\" (UID: \"439d1335-3f44-4f26-ad21-b8580866130c\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.777087 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.796691 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.817259 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.836782 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.856488 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.877174 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.897475 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.917390 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.937617 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.957018 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.976674 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 05:54:19 crc kubenswrapper[4742]: I1205 05:54:19.997547 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.017577 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.036702 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.057190 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.077605 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.097433 4742 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.117423 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.136924 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.157539 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.177244 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.197389 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.217676 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.238270 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.257376 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.277691 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.298100 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.317373 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.337476 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.357388 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.375758 4742 request.go:700] Waited for 1.00087963s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-operator-dockercfg-98p87&limit=500&resourceVersion=0 Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.377750 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.396775 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.417425 4742 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.437657 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.457306 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.496898 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.518132 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.539865 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.557723 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.578000 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.598079 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.617863 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.637859 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.657952 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.677291 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.698358 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.717924 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.737190 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.757341 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.777968 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.797011 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 05:54:20 
crc kubenswrapper[4742]: I1205 05:54:20.817478 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.837545 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.857109 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.876636 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.897948 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.917140 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.937412 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.966558 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.978197 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 05:54:20 crc kubenswrapper[4742]: I1205 05:54:20.997690 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.016582 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.038026 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.057277 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.077738 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.097562 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.117779 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.137488 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.158100 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.178157 4742 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-dns"/"dns-default" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.197674 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.233760 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr779\" (UniqueName: \"kubernetes.io/projected/4b516291-ecb5-48f1-8279-1448a2ad8f03-kube-api-access-vr779\") pod \"machine-approver-56656f9798-pxlnb\" (UID: \"4b516291-ecb5-48f1-8279-1448a2ad8f03\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.257021 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.266737 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkxbz\" (UniqueName: \"kubernetes.io/projected/bfef6735-7572-4411-b37d-b194d84534de-kube-api-access-vkxbz\") pod \"downloads-7954f5f757-gn9cx\" (UID: \"bfef6735-7572-4411-b37d-b194d84534de\") " pod="openshift-console/downloads-7954f5f757-gn9cx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.277958 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.328137 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl2cj\" (UniqueName: \"kubernetes.io/projected/b5c1e336-d85b-42a8-a268-2fed8fe3fe98-kube-api-access-hl2cj\") pod \"apiserver-76f77b778f-7mhpl\" (UID: \"b5c1e336-d85b-42a8-a268-2fed8fe3fe98\") " pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.345168 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66zhn\" (UniqueName: \"kubernetes.io/projected/3d2e8268-b920-41b6-a22d-50dca94f8a10-kube-api-access-66zhn\") pod \"openshift-apiserver-operator-796bbdcf4f-rbzzz\" (UID: \"3d2e8268-b920-41b6-a22d-50dca94f8a10\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.357558 4742 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.371608 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqn87\" (UniqueName: \"kubernetes.io/projected/51f64621-97df-411c-bb21-c24a7c2976be-kube-api-access-vqn87\") pod \"authentication-operator-69f744f599-79dwh\" (UID: \"51f64621-97df-411c-bb21-c24a7c2976be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.375813 4742 request.go:700] Waited for 1.905162832s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/hostpath-provisioner/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.378466 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.397453 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.416820 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fx4jb\" (UniqueName: \"kubernetes.io/projected/0c7188d0-4020-4749-8bd6-98b637ce3f3c-kube-api-access-fx4jb\") pod \"route-controller-manager-6576b87f9c-2279c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.432165 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6smz\" (UniqueName: \"kubernetes.io/projected/91c75381-2f50-415e-b5c8-e1261be30bbc-kube-api-access-z6smz\") pod \"console-f9d7485db-778cz\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.456385 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txc45\" (UniqueName: \"kubernetes.io/projected/defcf8e8-7650-448f-9950-3434978ee21d-kube-api-access-txc45\") pod \"apiserver-7bbb656c7d-wpv7n\" (UID: \"defcf8e8-7650-448f-9950-3434978ee21d\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.471207 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.472296 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.474821 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j8hc\" (UniqueName: \"kubernetes.io/projected/518731b7-0f61-40b4-ad6c-c49383c0dd5b-kube-api-access-8j8hc\") pod \"machine-api-operator-5694c8668f-zk4vj\" (UID: \"518731b7-0f61-40b4-ad6c-c49383c0dd5b\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.499128 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.508360 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.509361 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-795pw\" (UniqueName: \"kubernetes.io/projected/df7f2c2a-0105-489e-9087-3fbf406856a9-kube-api-access-795pw\") pod \"console-operator-58897d9998-d6dsw\" (UID: \"df7f2c2a-0105-489e-9087-3fbf406856a9\") " pod="openshift-console-operator/console-operator-58897d9998-d6dsw" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.520493 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-d6dsw" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.520701 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc5fq\" (UniqueName: \"kubernetes.io/projected/f3d9ea9f-6af6-42ea-9298-2e970da2572e-kube-api-access-lc5fq\") pod \"oauth-openshift-558db77b4-45686\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.532389 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.533970 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-gn9cx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.545809 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwk7m\" (UniqueName: \"kubernetes.io/projected/439d1335-3f44-4f26-ad21-b8580866130c-kube-api-access-lwk7m\") pod \"kube-storage-version-migrator-operator-b67b599dd-s8jnz\" (UID: \"439d1335-3f44-4f26-ad21-b8580866130c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.552315 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcwxk\" (UniqueName: \"kubernetes.io/projected/a302e9ed-44a5-41e8-8e91-c37771dca329-kube-api-access-wcwxk\") pod \"controller-manager-879f6c89f-vfstf\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") " pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.572245 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.593970 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fvzr\" (UniqueName: \"kubernetes.io/projected/e6e17c17-82b4-489b-a8c3-05b44b99e427-kube-api-access-8fvzr\") pod \"machine-config-operator-74547568cd-24g5k\" (UID: \"e6e17c17-82b4-489b-a8c3-05b44b99e427\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594049 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e6e17c17-82b4-489b-a8c3-05b44b99e427-images\") pod \"machine-config-operator-74547568cd-24g5k\" (UID: \"e6e17c17-82b4-489b-a8c3-05b44b99e427\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594166 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqlmg\" (UniqueName: \"kubernetes.io/projected/4eedeb03-3593-40b4-954b-76a312b87bbf-kube-api-access-gqlmg\") pod \"openshift-config-operator-7777fb866f-flkxk\" (UID: \"4eedeb03-3593-40b4-954b-76a312b87bbf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594270 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4eedeb03-3593-40b4-954b-76a312b87bbf-serving-cert\") pod \"openshift-config-operator-7777fb866f-flkxk\" (UID: \"4eedeb03-3593-40b4-954b-76a312b87bbf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594304 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxmcm\" (UniqueName: \"kubernetes.io/projected/f2d129d3-f117-492c-a680-a03e1ca560e1-kube-api-access-zxmcm\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594338 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/f2d129d3-f117-492c-a680-a03e1ca560e1-default-certificate\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594369 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2d129d3-f117-492c-a680-a03e1ca560e1-metrics-certs\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594403 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9l85h\" (UniqueName: \"kubernetes.io/projected/ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c-kube-api-access-9l85h\") pod \"ingress-operator-5b745b69d9-4ndvx\" (UID: 
\"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594471 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e6e17c17-82b4-489b-a8c3-05b44b99e427-proxy-tls\") pod \"machine-config-operator-74547568cd-24g5k\" (UID: \"e6e17c17-82b4-489b-a8c3-05b44b99e427\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594503 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/afacd0cf-c997-4688-bc5c-17c8f729f9c9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-k5bqg\" (UID: \"afacd0cf-c997-4688-bc5c-17c8f729f9c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594556 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f6dbf90-5722-4c03-b815-aa25831f6942-serving-cert\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594610 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/57ef3310-7e27-4216-9fa1-ccaa1c61cd4a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-cqzqd\" (UID: \"57ef3310-7e27-4216-9fa1-ccaa1c61cd4a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594643 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0f6dbf90-5722-4c03-b815-aa25831f6942-etcd-service-ca\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594676 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1dbf36d7-4088-4d93-90ec-5795e82dbc42-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-vntjw\" (UID: \"1dbf36d7-4088-4d93-90ec-5795e82dbc42\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594707 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f6dbf90-5722-4c03-b815-aa25831f6942-config\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594737 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0f6dbf90-5722-4c03-b815-aa25831f6942-etcd-client\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594799 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0f6dbf90-5722-4c03-b815-aa25831f6942-etcd-ca\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594907 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e9d19072-f6f9-42da-8b86-5d6bff4b340c-trusted-ca\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.594972 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2sbt\" (UniqueName: \"kubernetes.io/projected/0f7b345c-76d4-4f77-9b9d-7a9678976492-kube-api-access-w2sbt\") pod \"migrator-59844c95c7-7j9tp\" (UID: \"0f7b345c-76d4-4f77-9b9d-7a9678976492\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7j9tp" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.595004 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c932a965-637c-4db4-8dc4-b458856a4275-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mztl8\" (UID: \"c932a965-637c-4db4-8dc4-b458856a4275\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.595095 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c932a965-637c-4db4-8dc4-b458856a4275-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mztl8\" (UID: \"c932a965-637c-4db4-8dc4-b458856a4275\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.595132 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rtbh\" (UniqueName: \"kubernetes.io/projected/afacd0cf-c997-4688-bc5c-17c8f729f9c9-kube-api-access-4rtbh\") pod \"cluster-image-registry-operator-dc59b4c8b-k5bqg\" (UID: \"afacd0cf-c997-4688-bc5c-17c8f729f9c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.595184 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ae2344f6-2b2e-4071-a215-77b7513f3138-proxy-tls\") pod \"machine-config-controller-84d6567774-xmjng\" (UID: \"ae2344f6-2b2e-4071-a215-77b7513f3138\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.595216 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/afacd0cf-c997-4688-bc5c-17c8f729f9c9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-k5bqg\" (UID: 
\"afacd0cf-c997-4688-bc5c-17c8f729f9c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.595249 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/f2d129d3-f117-492c-a680-a03e1ca560e1-stats-auth\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606317 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/956af782-f4c5-4000-9fdb-2693248d5b52-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-hxw88\" (UID: \"956af782-f4c5-4000-9fdb-2693248d5b52\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606402 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1dbf36d7-4088-4d93-90ec-5795e82dbc42-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-vntjw\" (UID: \"1dbf36d7-4088-4d93-90ec-5795e82dbc42\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606432 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e9d19072-f6f9-42da-8b86-5d6bff4b340c-installation-pull-secrets\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606495 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/afacd0cf-c997-4688-bc5c-17c8f729f9c9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-k5bqg\" (UID: \"afacd0cf-c997-4688-bc5c-17c8f729f9c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606531 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-4ndvx\" (UID: \"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606559 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e9d19072-f6f9-42da-8b86-5d6bff4b340c-ca-trust-extracted\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606637 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6acfde43-1835-4664-9b34-bacd8d98a715-serving-cert\") pod 
\"kube-controller-manager-operator-78b949d7b-x9xsd\" (UID: \"6acfde43-1835-4664-9b34-bacd8d98a715\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606661 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6acfde43-1835-4664-9b34-bacd8d98a715-config\") pod \"kube-controller-manager-operator-78b949d7b-x9xsd\" (UID: \"6acfde43-1835-4664-9b34-bacd8d98a715\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606684 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ae2344f6-2b2e-4071-a215-77b7513f3138-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-xmjng\" (UID: \"ae2344f6-2b2e-4071-a215-77b7513f3138\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606751 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4eedeb03-3593-40b4-954b-76a312b87bbf-available-featuregates\") pod \"openshift-config-operator-7777fb866f-flkxk\" (UID: \"4eedeb03-3593-40b4-954b-76a312b87bbf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606786 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606812 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c-trusted-ca\") pod \"ingress-operator-5b745b69d9-4ndvx\" (UID: \"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606839 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eef2f79c-627c-4bd0-829a-8c16b1f85143-metrics-tls\") pod \"dns-operator-744455d44c-xqfl8\" (UID: \"eef2f79c-627c-4bd0-829a-8c16b1f85143\") " pod="openshift-dns-operator/dns-operator-744455d44c-xqfl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606878 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e9d19072-f6f9-42da-8b86-5d6bff4b340c-registry-certificates\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606912 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftpzm\" (UniqueName: 
\"kubernetes.io/projected/57ef3310-7e27-4216-9fa1-ccaa1c61cd4a-kube-api-access-ftpzm\") pod \"cluster-samples-operator-665b6dd947-cqzqd\" (UID: \"57ef3310-7e27-4216-9fa1-ccaa1c61cd4a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.606978 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlm9l\" (UniqueName: \"kubernetes.io/projected/eef2f79c-627c-4bd0-829a-8c16b1f85143-kube-api-access-zlm9l\") pod \"dns-operator-744455d44c-xqfl8\" (UID: \"eef2f79c-627c-4bd0-829a-8c16b1f85143\") " pod="openshift-dns-operator/dns-operator-744455d44c-xqfl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607030 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/956af782-f4c5-4000-9fdb-2693248d5b52-config\") pod \"kube-apiserver-operator-766d6c64bb-hxw88\" (UID: \"956af782-f4c5-4000-9fdb-2693248d5b52\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607120 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/93cbfc1e-ff82-4309-a7a8-dd57f1fc6616-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rsr6b\" (UID: \"93cbfc1e-ff82-4309-a7a8-dd57f1fc6616\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rsr6b" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607147 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hw5cq\" (UniqueName: \"kubernetes.io/projected/ae2344f6-2b2e-4071-a215-77b7513f3138-kube-api-access-hw5cq\") pod \"machine-config-controller-84d6567774-xmjng\" (UID: \"ae2344f6-2b2e-4071-a215-77b7513f3138\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607182 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbdvh\" (UniqueName: \"kubernetes.io/projected/93cbfc1e-ff82-4309-a7a8-dd57f1fc6616-kube-api-access-vbdvh\") pod \"control-plane-machine-set-operator-78cbb6b69f-rsr6b\" (UID: \"93cbfc1e-ff82-4309-a7a8-dd57f1fc6616\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rsr6b" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607204 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-bound-sa-token\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607225 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-registry-tls\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607318 4742 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f2d129d3-f117-492c-a680-a03e1ca560e1-service-ca-bundle\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607342 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfvpw\" (UniqueName: \"kubernetes.io/projected/1dbf36d7-4088-4d93-90ec-5795e82dbc42-kube-api-access-hfvpw\") pod \"openshift-controller-manager-operator-756b6f6bc6-vntjw\" (UID: \"1dbf36d7-4088-4d93-90ec-5795e82dbc42\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607364 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8r8c\" (UniqueName: \"kubernetes.io/projected/0f6dbf90-5722-4c03-b815-aa25831f6942-kube-api-access-b8r8c\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607386 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6acfde43-1835-4664-9b34-bacd8d98a715-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-x9xsd\" (UID: \"6acfde43-1835-4664-9b34-bacd8d98a715\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607438 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c932a965-637c-4db4-8dc4-b458856a4275-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mztl8\" (UID: \"c932a965-637c-4db4-8dc4-b458856a4275\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607461 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c-metrics-tls\") pod \"ingress-operator-5b745b69d9-4ndvx\" (UID: \"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607502 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28kw5\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-kube-api-access-28kw5\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607557 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/956af782-f4c5-4000-9fdb-2693248d5b52-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-hxw88\" (UID: \"956af782-f4c5-4000-9fdb-2693248d5b52\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.607579 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e6e17c17-82b4-489b-a8c3-05b44b99e427-auth-proxy-config\") pod \"machine-config-operator-74547568cd-24g5k\" (UID: \"e6e17c17-82b4-489b-a8c3-05b44b99e427\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" Dec 05 05:54:21 crc kubenswrapper[4742]: E1205 05:54:21.610029 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:22.110012269 +0000 UTC m=+138.022147391 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.640656 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.659240 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-7mhpl"] Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.708253 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.708584 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f6dbf90-5722-4c03-b815-aa25831f6942-serving-cert\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.708619 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/57ef3310-7e27-4216-9fa1-ccaa1c61cd4a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-cqzqd\" (UID: \"57ef3310-7e27-4216-9fa1-ccaa1c61cd4a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.708669 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0f6dbf90-5722-4c03-b815-aa25831f6942-etcd-service-ca\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.708705 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f6dbf90-5722-4c03-b815-aa25831f6942-config\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.708754 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0f6dbf90-5722-4c03-b815-aa25831f6942-etcd-client\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.708782 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d42b86d2-579c-4fa1-aeb4-6d3d7a47798d-tmpfs\") pod \"packageserver-d55dfcdfc-9wtdc\" (UID: \"d42b86d2-579c-4fa1-aeb4-6d3d7a47798d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.708845 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1dbf36d7-4088-4d93-90ec-5795e82dbc42-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-vntjw\" (UID: \"1dbf36d7-4088-4d93-90ec-5795e82dbc42\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.708869 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w25rr\" (UniqueName: \"kubernetes.io/projected/930e305a-35d6-4053-8064-58fb2662d8b0-kube-api-access-w25rr\") pod \"multus-admission-controller-857f4d67dd-fpfzc\" (UID: \"930e305a-35d6-4053-8064-58fb2662d8b0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fpfzc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.708911 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/2a89062e-1da0-4abd-a415-92c6fd9e76f4-signing-cabundle\") pod \"service-ca-9c57cc56f-7jwfc\" (UID: \"2a89062e-1da0-4abd-a415-92c6fd9e76f4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7jwfc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.708930 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0f6dbf90-5722-4c03-b815-aa25831f6942-etcd-ca\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.708947 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-mountpoint-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709010 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ljxw\" (UniqueName: \"kubernetes.io/projected/3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21-kube-api-access-7ljxw\") pod 
\"service-ca-operator-777779d784-bxxl2\" (UID: \"3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709086 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e9d19072-f6f9-42da-8b86-5d6bff4b340c-trusted-ca\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709105 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/997634d0-c379-4978-a8a5-4da39a072ff4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dfmr9\" (UID: \"997634d0-c379-4978-a8a5-4da39a072ff4\") " pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709122 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-registration-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709165 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2sbt\" (UniqueName: \"kubernetes.io/projected/0f7b345c-76d4-4f77-9b9d-7a9678976492-kube-api-access-w2sbt\") pod \"migrator-59844c95c7-7j9tp\" (UID: \"0f7b345c-76d4-4f77-9b9d-7a9678976492\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7j9tp" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709182 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0d05e5b0-e7d1-4e07-923d-79473a1532d2-srv-cert\") pod \"olm-operator-6b444d44fb-58gtq\" (UID: \"0d05e5b0-e7d1-4e07-923d-79473a1532d2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709200 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c932a965-637c-4db4-8dc4-b458856a4275-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mztl8\" (UID: \"c932a965-637c-4db4-8dc4-b458856a4275\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709266 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/997634d0-c379-4978-a8a5-4da39a072ff4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dfmr9\" (UID: \"997634d0-c379-4978-a8a5-4da39a072ff4\") " pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709286 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/2a89062e-1da0-4abd-a415-92c6fd9e76f4-signing-key\") pod \"service-ca-9c57cc56f-7jwfc\" (UID: \"2a89062e-1da0-4abd-a415-92c6fd9e76f4\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-7jwfc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709324 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ebaea921-5d50-4d64-b73e-db0feab77248-config-volume\") pod \"collect-profiles-29415225-grqtz\" (UID: \"ebaea921-5d50-4d64-b73e-db0feab77248\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709344 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c932a965-637c-4db4-8dc4-b458856a4275-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mztl8\" (UID: \"c932a965-637c-4db4-8dc4-b458856a4275\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709360 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rtbh\" (UniqueName: \"kubernetes.io/projected/afacd0cf-c997-4688-bc5c-17c8f729f9c9-kube-api-access-4rtbh\") pod \"cluster-image-registry-operator-dc59b4c8b-k5bqg\" (UID: \"afacd0cf-c997-4688-bc5c-17c8f729f9c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709398 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/88aaa7d3-241a-422a-807b-fb64376527c4-certs\") pod \"machine-config-server-ngjs9\" (UID: \"88aaa7d3-241a-422a-807b-fb64376527c4\") " pod="openshift-machine-config-operator/machine-config-server-ngjs9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709417 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/71735d28-4973-432d-9884-8979622121ec-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-z8tpl\" (UID: \"71735d28-4973-432d-9884-8979622121ec\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709437 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ae2344f6-2b2e-4071-a215-77b7513f3138-proxy-tls\") pod \"machine-config-controller-84d6567774-xmjng\" (UID: \"ae2344f6-2b2e-4071-a215-77b7513f3138\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709499 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/afacd0cf-c997-4688-bc5c-17c8f729f9c9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-k5bqg\" (UID: \"afacd0cf-c997-4688-bc5c-17c8f729f9c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709526 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6khn\" (UniqueName: \"kubernetes.io/projected/88aaa7d3-241a-422a-807b-fb64376527c4-kube-api-access-s6khn\") pod \"machine-config-server-ngjs9\" (UID: \"88aaa7d3-241a-422a-807b-fb64376527c4\") " 
pod="openshift-machine-config-operator/machine-config-server-ngjs9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709586 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/f2d129d3-f117-492c-a680-a03e1ca560e1-stats-auth\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709605 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/956af782-f4c5-4000-9fdb-2693248d5b52-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-hxw88\" (UID: \"956af782-f4c5-4000-9fdb-2693248d5b52\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709650 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9d820c8f-0796-4541-b7f9-bc7927cdbb45-metrics-tls\") pod \"dns-default-rd6kc\" (UID: \"9d820c8f-0796-4541-b7f9-bc7927cdbb45\") " pod="openshift-dns/dns-default-rd6kc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709679 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1dbf36d7-4088-4d93-90ec-5795e82dbc42-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-vntjw\" (UID: \"1dbf36d7-4088-4d93-90ec-5795e82dbc42\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709695 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e9d19072-f6f9-42da-8b86-5d6bff4b340c-installation-pull-secrets\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709733 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pf9h9\" (UniqueName: \"kubernetes.io/projected/0d05e5b0-e7d1-4e07-923d-79473a1532d2-kube-api-access-pf9h9\") pod \"olm-operator-6b444d44fb-58gtq\" (UID: \"0d05e5b0-e7d1-4e07-923d-79473a1532d2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709748 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d42b86d2-579c-4fa1-aeb4-6d3d7a47798d-apiservice-cert\") pod \"packageserver-d55dfcdfc-9wtdc\" (UID: \"d42b86d2-579c-4fa1-aeb4-6d3d7a47798d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709774 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/afacd0cf-c997-4688-bc5c-17c8f729f9c9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-k5bqg\" (UID: \"afacd0cf-c997-4688-bc5c-17c8f729f9c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" Dec 05 05:54:21 crc 
kubenswrapper[4742]: I1205 05:54:21.709826 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-4ndvx\" (UID: \"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709843 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e9d19072-f6f9-42da-8b86-5d6bff4b340c-ca-trust-extracted\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709902 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6acfde43-1835-4664-9b34-bacd8d98a715-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-x9xsd\" (UID: \"6acfde43-1835-4664-9b34-bacd8d98a715\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709919 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6acfde43-1835-4664-9b34-bacd8d98a715-config\") pod \"kube-controller-manager-operator-78b949d7b-x9xsd\" (UID: \"6acfde43-1835-4664-9b34-bacd8d98a715\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709941 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d42b86d2-579c-4fa1-aeb4-6d3d7a47798d-webhook-cert\") pod \"packageserver-d55dfcdfc-9wtdc\" (UID: \"d42b86d2-579c-4fa1-aeb4-6d3d7a47798d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709980 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ae2344f6-2b2e-4071-a215-77b7513f3138-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-xmjng\" (UID: \"ae2344f6-2b2e-4071-a215-77b7513f3138\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.709996 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9d820c8f-0796-4541-b7f9-bc7927cdbb45-config-volume\") pod \"dns-default-rd6kc\" (UID: \"9d820c8f-0796-4541-b7f9-bc7927cdbb45\") " pod="openshift-dns/dns-default-rd6kc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710012 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ebaea921-5d50-4d64-b73e-db0feab77248-secret-volume\") pod \"collect-profiles-29415225-grqtz\" (UID: \"ebaea921-5d50-4d64-b73e-db0feab77248\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710090 4742 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kkww\" (UniqueName: \"kubernetes.io/projected/2a89062e-1da0-4abd-a415-92c6fd9e76f4-kube-api-access-9kkww\") pod \"service-ca-9c57cc56f-7jwfc\" (UID: \"2a89062e-1da0-4abd-a415-92c6fd9e76f4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7jwfc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710129 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4eedeb03-3593-40b4-954b-76a312b87bbf-available-featuregates\") pod \"openshift-config-operator-7777fb866f-flkxk\" (UID: \"4eedeb03-3593-40b4-954b-76a312b87bbf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710157 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c-trusted-ca\") pod \"ingress-operator-5b745b69d9-4ndvx\" (UID: \"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710172 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0100b9e7-bc1f-4ada-ac18-11bba1edd54b-srv-cert\") pod \"catalog-operator-68c6474976-wvkz9\" (UID: \"0100b9e7-bc1f-4ada-ac18-11bba1edd54b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710227 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eef2f79c-627c-4bd0-829a-8c16b1f85143-metrics-tls\") pod \"dns-operator-744455d44c-xqfl8\" (UID: \"eef2f79c-627c-4bd0-829a-8c16b1f85143\") " pod="openshift-dns-operator/dns-operator-744455d44c-xqfl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710244 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/930e305a-35d6-4053-8064-58fb2662d8b0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fpfzc\" (UID: \"930e305a-35d6-4053-8064-58fb2662d8b0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fpfzc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710259 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-plugins-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710303 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e9d19072-f6f9-42da-8b86-5d6bff4b340c-registry-certificates\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710320 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftpzm\" (UniqueName: 
\"kubernetes.io/projected/57ef3310-7e27-4216-9fa1-ccaa1c61cd4a-kube-api-access-ftpzm\") pod \"cluster-samples-operator-665b6dd947-cqzqd\" (UID: \"57ef3310-7e27-4216-9fa1-ccaa1c61cd4a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710343 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21-serving-cert\") pod \"service-ca-operator-777779d784-bxxl2\" (UID: \"3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710882 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlm9l\" (UniqueName: \"kubernetes.io/projected/eef2f79c-627c-4bd0-829a-8c16b1f85143-kube-api-access-zlm9l\") pod \"dns-operator-744455d44c-xqfl8\" (UID: \"eef2f79c-627c-4bd0-829a-8c16b1f85143\") " pod="openshift-dns-operator/dns-operator-744455d44c-xqfl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710907 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cca75767-0e7f-42b7-8acc-fdd795b5c30e-cert\") pod \"ingress-canary-qhd4g\" (UID: \"cca75767-0e7f-42b7-8acc-fdd795b5c30e\") " pod="openshift-ingress-canary/ingress-canary-qhd4g" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710947 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlktb\" (UniqueName: \"kubernetes.io/projected/0100b9e7-bc1f-4ada-ac18-11bba1edd54b-kube-api-access-tlktb\") pod \"catalog-operator-68c6474976-wvkz9\" (UID: \"0100b9e7-bc1f-4ada-ac18-11bba1edd54b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710968 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/956af782-f4c5-4000-9fdb-2693248d5b52-config\") pod \"kube-apiserver-operator-766d6c64bb-hxw88\" (UID: \"956af782-f4c5-4000-9fdb-2693248d5b52\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.710986 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/88aaa7d3-241a-422a-807b-fb64376527c4-node-bootstrap-token\") pod \"machine-config-server-ngjs9\" (UID: \"88aaa7d3-241a-422a-807b-fb64376527c4\") " pod="openshift-machine-config-operator/machine-config-server-ngjs9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711022 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-socket-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711045 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0100b9e7-bc1f-4ada-ac18-11bba1edd54b-profile-collector-cert\") pod 
\"catalog-operator-68c6474976-wvkz9\" (UID: \"0100b9e7-bc1f-4ada-ac18-11bba1edd54b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711092 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/93cbfc1e-ff82-4309-a7a8-dd57f1fc6616-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rsr6b\" (UID: \"93cbfc1e-ff82-4309-a7a8-dd57f1fc6616\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rsr6b" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711112 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hw5cq\" (UniqueName: \"kubernetes.io/projected/ae2344f6-2b2e-4071-a215-77b7513f3138-kube-api-access-hw5cq\") pod \"machine-config-controller-84d6567774-xmjng\" (UID: \"ae2344f6-2b2e-4071-a215-77b7513f3138\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711130 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mggvj\" (UniqueName: \"kubernetes.io/projected/cca75767-0e7f-42b7-8acc-fdd795b5c30e-kube-api-access-mggvj\") pod \"ingress-canary-qhd4g\" (UID: \"cca75767-0e7f-42b7-8acc-fdd795b5c30e\") " pod="openshift-ingress-canary/ingress-canary-qhd4g" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711167 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-csi-data-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711196 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbdvh\" (UniqueName: \"kubernetes.io/projected/93cbfc1e-ff82-4309-a7a8-dd57f1fc6616-kube-api-access-vbdvh\") pod \"control-plane-machine-set-operator-78cbb6b69f-rsr6b\" (UID: \"93cbfc1e-ff82-4309-a7a8-dd57f1fc6616\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rsr6b" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711373 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-registry-tls\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711419 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-bound-sa-token\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711436 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21-config\") pod \"service-ca-operator-777779d784-bxxl2\" (UID: 
\"3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711455 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kl4tp\" (UniqueName: \"kubernetes.io/projected/71735d28-4973-432d-9884-8979622121ec-kube-api-access-kl4tp\") pod \"package-server-manager-789f6589d5-z8tpl\" (UID: \"71735d28-4973-432d-9884-8979622121ec\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711500 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f2d129d3-f117-492c-a680-a03e1ca560e1-service-ca-bundle\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711519 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6acfde43-1835-4664-9b34-bacd8d98a715-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-x9xsd\" (UID: \"6acfde43-1835-4664-9b34-bacd8d98a715\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711538 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgbk5\" (UniqueName: \"kubernetes.io/projected/ebaea921-5d50-4d64-b73e-db0feab77248-kube-api-access-hgbk5\") pod \"collect-profiles-29415225-grqtz\" (UID: \"ebaea921-5d50-4d64-b73e-db0feab77248\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.711873 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfvpw\" (UniqueName: \"kubernetes.io/projected/1dbf36d7-4088-4d93-90ec-5795e82dbc42-kube-api-access-hfvpw\") pod \"openshift-controller-manager-operator-756b6f6bc6-vntjw\" (UID: \"1dbf36d7-4088-4d93-90ec-5795e82dbc42\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.712099 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" Dec 05 05:54:21 crc kubenswrapper[4742]: E1205 05:54:21.712172 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:22.212148247 +0000 UTC m=+138.124283309 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.712236 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8r8c\" (UniqueName: \"kubernetes.io/projected/0f6dbf90-5722-4c03-b815-aa25831f6942-kube-api-access-b8r8c\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.712264 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c932a965-637c-4db4-8dc4-b458856a4275-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mztl8\" (UID: \"c932a965-637c-4db4-8dc4-b458856a4275\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.712280 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c-metrics-tls\") pod \"ingress-operator-5b745b69d9-4ndvx\" (UID: \"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.712325 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clrzk\" (UniqueName: \"kubernetes.io/projected/997634d0-c379-4978-a8a5-4da39a072ff4-kube-api-access-clrzk\") pod \"marketplace-operator-79b997595-dfmr9\" (UID: \"997634d0-c379-4978-a8a5-4da39a072ff4\") " pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.712348 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28kw5\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-kube-api-access-28kw5\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.712499 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/956af782-f4c5-4000-9fdb-2693248d5b52-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-hxw88\" (UID: \"956af782-f4c5-4000-9fdb-2693248d5b52\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.712609 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e6e17c17-82b4-489b-a8c3-05b44b99e427-auth-proxy-config\") pod \"machine-config-operator-74547568cd-24g5k\" (UID: \"e6e17c17-82b4-489b-a8c3-05b44b99e427\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.713434 4742 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4eedeb03-3593-40b4-954b-76a312b87bbf-available-featuregates\") pod \"openshift-config-operator-7777fb866f-flkxk\" (UID: \"4eedeb03-3593-40b4-954b-76a312b87bbf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.713977 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0f6dbf90-5722-4c03-b815-aa25831f6942-etcd-ca\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.714593 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c932a965-637c-4db4-8dc4-b458856a4275-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mztl8\" (UID: \"c932a965-637c-4db4-8dc4-b458856a4275\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.714786 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e9d19072-f6f9-42da-8b86-5d6bff4b340c-registry-certificates\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.714979 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e9d19072-f6f9-42da-8b86-5d6bff4b340c-trusted-ca\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.715149 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f6dbf90-5722-4c03-b815-aa25831f6942-serving-cert\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.716786 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/956af782-f4c5-4000-9fdb-2693248d5b52-config\") pod \"kube-apiserver-operator-766d6c64bb-hxw88\" (UID: \"956af782-f4c5-4000-9fdb-2693248d5b52\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.719517 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28klv\" (UniqueName: \"kubernetes.io/projected/e383eaae-c654-4e64-be23-cb7a9cef6df7-kube-api-access-28klv\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.719598 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fvzr\" (UniqueName: \"kubernetes.io/projected/e6e17c17-82b4-489b-a8c3-05b44b99e427-kube-api-access-8fvzr\") pod \"machine-config-operator-74547568cd-24g5k\" (UID: 
\"e6e17c17-82b4-489b-a8c3-05b44b99e427\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.720435 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxvxm\" (UniqueName: \"kubernetes.io/projected/d42b86d2-579c-4fa1-aeb4-6d3d7a47798d-kube-api-access-wxvxm\") pod \"packageserver-d55dfcdfc-9wtdc\" (UID: \"d42b86d2-579c-4fa1-aeb4-6d3d7a47798d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.720465 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e6e17c17-82b4-489b-a8c3-05b44b99e427-images\") pod \"machine-config-operator-74547568cd-24g5k\" (UID: \"e6e17c17-82b4-489b-a8c3-05b44b99e427\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.720512 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0d05e5b0-e7d1-4e07-923d-79473a1532d2-profile-collector-cert\") pod \"olm-operator-6b444d44fb-58gtq\" (UID: \"0d05e5b0-e7d1-4e07-923d-79473a1532d2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.720538 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqlmg\" (UniqueName: \"kubernetes.io/projected/4eedeb03-3593-40b4-954b-76a312b87bbf-kube-api-access-gqlmg\") pod \"openshift-config-operator-7777fb866f-flkxk\" (UID: \"4eedeb03-3593-40b4-954b-76a312b87bbf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.720556 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfc7h\" (UniqueName: \"kubernetes.io/projected/9d820c8f-0796-4541-b7f9-bc7927cdbb45-kube-api-access-tfc7h\") pod \"dns-default-rd6kc\" (UID: \"9d820c8f-0796-4541-b7f9-bc7927cdbb45\") " pod="openshift-dns/dns-default-rd6kc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.720617 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4eedeb03-3593-40b4-954b-76a312b87bbf-serving-cert\") pod \"openshift-config-operator-7777fb866f-flkxk\" (UID: \"4eedeb03-3593-40b4-954b-76a312b87bbf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.720637 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/f2d129d3-f117-492c-a680-a03e1ca560e1-default-certificate\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.720677 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxmcm\" (UniqueName: \"kubernetes.io/projected/f2d129d3-f117-492c-a680-a03e1ca560e1-kube-api-access-zxmcm\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 
05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.720702 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2d129d3-f117-492c-a680-a03e1ca560e1-metrics-certs\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.720752 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9l85h\" (UniqueName: \"kubernetes.io/projected/ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c-kube-api-access-9l85h\") pod \"ingress-operator-5b745b69d9-4ndvx\" (UID: \"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.721539 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1dbf36d7-4088-4d93-90ec-5795e82dbc42-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-vntjw\" (UID: \"1dbf36d7-4088-4d93-90ec-5795e82dbc42\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.721658 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e6e17c17-82b4-489b-a8c3-05b44b99e427-images\") pod \"machine-config-operator-74547568cd-24g5k\" (UID: \"e6e17c17-82b4-489b-a8c3-05b44b99e427\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.722314 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f2d129d3-f117-492c-a680-a03e1ca560e1-service-ca-bundle\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.722499 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0f6dbf90-5722-4c03-b815-aa25831f6942-etcd-client\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.722838 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/f2d129d3-f117-492c-a680-a03e1ca560e1-stats-auth\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.722948 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0f6dbf90-5722-4c03-b815-aa25831f6942-etcd-service-ca\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.723195 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c932a965-637c-4db4-8dc4-b458856a4275-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mztl8\" 
(UID: \"c932a965-637c-4db4-8dc4-b458856a4275\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.723519 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e6e17c17-82b4-489b-a8c3-05b44b99e427-auth-proxy-config\") pod \"machine-config-operator-74547568cd-24g5k\" (UID: \"e6e17c17-82b4-489b-a8c3-05b44b99e427\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.723573 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e6e17c17-82b4-489b-a8c3-05b44b99e427-proxy-tls\") pod \"machine-config-operator-74547568cd-24g5k\" (UID: \"e6e17c17-82b4-489b-a8c3-05b44b99e427\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.723593 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/afacd0cf-c997-4688-bc5c-17c8f729f9c9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-k5bqg\" (UID: \"afacd0cf-c997-4688-bc5c-17c8f729f9c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.724028 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f6dbf90-5722-4c03-b815-aa25831f6942-config\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.724692 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/afacd0cf-c997-4688-bc5c-17c8f729f9c9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-k5bqg\" (UID: \"afacd0cf-c997-4688-bc5c-17c8f729f9c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.726144 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e9d19072-f6f9-42da-8b86-5d6bff4b340c-ca-trust-extracted\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.726281 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/57ef3310-7e27-4216-9fa1-ccaa1c61cd4a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-cqzqd\" (UID: \"57ef3310-7e27-4216-9fa1-ccaa1c61cd4a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.727496 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6acfde43-1835-4664-9b34-bacd8d98a715-config\") pod \"kube-controller-manager-operator-78b949d7b-x9xsd\" (UID: \"6acfde43-1835-4664-9b34-bacd8d98a715\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd" Dec 05 05:54:21 crc 
kubenswrapper[4742]: I1205 05:54:21.727620 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/f2d129d3-f117-492c-a680-a03e1ca560e1-default-certificate\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.728340 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/956af782-f4c5-4000-9fdb-2693248d5b52-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-hxw88\" (UID: \"956af782-f4c5-4000-9fdb-2693248d5b52\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.728774 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e9d19072-f6f9-42da-8b86-5d6bff4b340c-installation-pull-secrets\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.729100 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ae2344f6-2b2e-4071-a215-77b7513f3138-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-xmjng\" (UID: \"ae2344f6-2b2e-4071-a215-77b7513f3138\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.729633 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ae2344f6-2b2e-4071-a215-77b7513f3138-proxy-tls\") pod \"machine-config-controller-84d6567774-xmjng\" (UID: \"ae2344f6-2b2e-4071-a215-77b7513f3138\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.729647 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-registry-tls\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.731269 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c-trusted-ca\") pod \"ingress-operator-5b745b69d9-4ndvx\" (UID: \"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.733125 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1dbf36d7-4088-4d93-90ec-5795e82dbc42-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-vntjw\" (UID: \"1dbf36d7-4088-4d93-90ec-5795e82dbc42\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.733559 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/4eedeb03-3593-40b4-954b-76a312b87bbf-serving-cert\") pod \"openshift-config-operator-7777fb866f-flkxk\" (UID: \"4eedeb03-3593-40b4-954b-76a312b87bbf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.733655 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/afacd0cf-c997-4688-bc5c-17c8f729f9c9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-k5bqg\" (UID: \"afacd0cf-c997-4688-bc5c-17c8f729f9c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.733974 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e6e17c17-82b4-489b-a8c3-05b44b99e427-proxy-tls\") pod \"machine-config-operator-74547568cd-24g5k\" (UID: \"e6e17c17-82b4-489b-a8c3-05b44b99e427\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.734111 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/93cbfc1e-ff82-4309-a7a8-dd57f1fc6616-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rsr6b\" (UID: \"93cbfc1e-ff82-4309-a7a8-dd57f1fc6616\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rsr6b" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.735519 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/eef2f79c-627c-4bd0-829a-8c16b1f85143-metrics-tls\") pod \"dns-operator-744455d44c-xqfl8\" (UID: \"eef2f79c-627c-4bd0-829a-8c16b1f85143\") " pod="openshift-dns-operator/dns-operator-744455d44c-xqfl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.737616 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c-metrics-tls\") pod \"ingress-operator-5b745b69d9-4ndvx\" (UID: \"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.737865 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6acfde43-1835-4664-9b34-bacd8d98a715-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-x9xsd\" (UID: \"6acfde43-1835-4664-9b34-bacd8d98a715\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.740125 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/f2d129d3-f117-492c-a680-a03e1ca560e1-metrics-certs\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.743787 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.756496 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbdvh\" (UniqueName: \"kubernetes.io/projected/93cbfc1e-ff82-4309-a7a8-dd57f1fc6616-kube-api-access-vbdvh\") pod \"control-plane-machine-set-operator-78cbb6b69f-rsr6b\" (UID: \"93cbfc1e-ff82-4309-a7a8-dd57f1fc6616\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rsr6b" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.774375 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfvpw\" (UniqueName: \"kubernetes.io/projected/1dbf36d7-4088-4d93-90ec-5795e82dbc42-kube-api-access-hfvpw\") pod \"openshift-controller-manager-operator-756b6f6bc6-vntjw\" (UID: \"1dbf36d7-4088-4d93-90ec-5795e82dbc42\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.778316 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz"] Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.783444 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.796406 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftpzm\" (UniqueName: \"kubernetes.io/projected/57ef3310-7e27-4216-9fa1-ccaa1c61cd4a-kube-api-access-ftpzm\") pod \"cluster-samples-operator-665b6dd947-cqzqd\" (UID: \"57ef3310-7e27-4216-9fa1-ccaa1c61cd4a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.814559 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/afacd0cf-c997-4688-bc5c-17c8f729f9c9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-k5bqg\" (UID: \"afacd0cf-c997-4688-bc5c-17c8f729f9c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825010 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/930e305a-35d6-4053-8064-58fb2662d8b0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fpfzc\" (UID: \"930e305a-35d6-4053-8064-58fb2662d8b0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fpfzc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825041 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-plugins-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825076 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21-serving-cert\") pod \"service-ca-operator-777779d784-bxxl2\" (UID: \"3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2" Dec 05 05:54:21 crc 
kubenswrapper[4742]: I1205 05:54:21.825099 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cca75767-0e7f-42b7-8acc-fdd795b5c30e-cert\") pod \"ingress-canary-qhd4g\" (UID: \"cca75767-0e7f-42b7-8acc-fdd795b5c30e\") " pod="openshift-ingress-canary/ingress-canary-qhd4g" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825116 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlktb\" (UniqueName: \"kubernetes.io/projected/0100b9e7-bc1f-4ada-ac18-11bba1edd54b-kube-api-access-tlktb\") pod \"catalog-operator-68c6474976-wvkz9\" (UID: \"0100b9e7-bc1f-4ada-ac18-11bba1edd54b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825133 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/88aaa7d3-241a-422a-807b-fb64376527c4-node-bootstrap-token\") pod \"machine-config-server-ngjs9\" (UID: \"88aaa7d3-241a-422a-807b-fb64376527c4\") " pod="openshift-machine-config-operator/machine-config-server-ngjs9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825147 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-socket-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825163 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0100b9e7-bc1f-4ada-ac18-11bba1edd54b-profile-collector-cert\") pod \"catalog-operator-68c6474976-wvkz9\" (UID: \"0100b9e7-bc1f-4ada-ac18-11bba1edd54b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825190 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mggvj\" (UniqueName: \"kubernetes.io/projected/cca75767-0e7f-42b7-8acc-fdd795b5c30e-kube-api-access-mggvj\") pod \"ingress-canary-qhd4g\" (UID: \"cca75767-0e7f-42b7-8acc-fdd795b5c30e\") " pod="openshift-ingress-canary/ingress-canary-qhd4g" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825206 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-csi-data-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825225 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21-config\") pod \"service-ca-operator-777779d784-bxxl2\" (UID: \"3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825240 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kl4tp\" (UniqueName: \"kubernetes.io/projected/71735d28-4973-432d-9884-8979622121ec-kube-api-access-kl4tp\") pod \"package-server-manager-789f6589d5-z8tpl\" (UID: 
\"71735d28-4973-432d-9884-8979622121ec\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825270 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgbk5\" (UniqueName: \"kubernetes.io/projected/ebaea921-5d50-4d64-b73e-db0feab77248-kube-api-access-hgbk5\") pod \"collect-profiles-29415225-grqtz\" (UID: \"ebaea921-5d50-4d64-b73e-db0feab77248\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825291 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clrzk\" (UniqueName: \"kubernetes.io/projected/997634d0-c379-4978-a8a5-4da39a072ff4-kube-api-access-clrzk\") pod \"marketplace-operator-79b997595-dfmr9\" (UID: \"997634d0-c379-4978-a8a5-4da39a072ff4\") " pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825319 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxvxm\" (UniqueName: \"kubernetes.io/projected/d42b86d2-579c-4fa1-aeb4-6d3d7a47798d-kube-api-access-wxvxm\") pod \"packageserver-d55dfcdfc-9wtdc\" (UID: \"d42b86d2-579c-4fa1-aeb4-6d3d7a47798d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825351 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28klv\" (UniqueName: \"kubernetes.io/projected/e383eaae-c654-4e64-be23-cb7a9cef6df7-kube-api-access-28klv\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825369 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0d05e5b0-e7d1-4e07-923d-79473a1532d2-profile-collector-cert\") pod \"olm-operator-6b444d44fb-58gtq\" (UID: \"0d05e5b0-e7d1-4e07-923d-79473a1532d2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825385 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfc7h\" (UniqueName: \"kubernetes.io/projected/9d820c8f-0796-4541-b7f9-bc7927cdbb45-kube-api-access-tfc7h\") pod \"dns-default-rd6kc\" (UID: \"9d820c8f-0796-4541-b7f9-bc7927cdbb45\") " pod="openshift-dns/dns-default-rd6kc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825424 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d42b86d2-579c-4fa1-aeb4-6d3d7a47798d-tmpfs\") pod \"packageserver-d55dfcdfc-9wtdc\" (UID: \"d42b86d2-579c-4fa1-aeb4-6d3d7a47798d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825445 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w25rr\" (UniqueName: \"kubernetes.io/projected/930e305a-35d6-4053-8064-58fb2662d8b0-kube-api-access-w25rr\") pod \"multus-admission-controller-857f4d67dd-fpfzc\" (UID: \"930e305a-35d6-4053-8064-58fb2662d8b0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fpfzc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825461 
4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/2a89062e-1da0-4abd-a415-92c6fd9e76f4-signing-cabundle\") pod \"service-ca-9c57cc56f-7jwfc\" (UID: \"2a89062e-1da0-4abd-a415-92c6fd9e76f4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7jwfc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825477 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-mountpoint-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825495 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ljxw\" (UniqueName: \"kubernetes.io/projected/3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21-kube-api-access-7ljxw\") pod \"service-ca-operator-777779d784-bxxl2\" (UID: \"3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825511 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/997634d0-c379-4978-a8a5-4da39a072ff4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dfmr9\" (UID: \"997634d0-c379-4978-a8a5-4da39a072ff4\") " pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825525 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-registration-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825543 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0d05e5b0-e7d1-4e07-923d-79473a1532d2-srv-cert\") pod \"olm-operator-6b444d44fb-58gtq\" (UID: \"0d05e5b0-e7d1-4e07-923d-79473a1532d2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825558 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/997634d0-c379-4978-a8a5-4da39a072ff4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dfmr9\" (UID: \"997634d0-c379-4978-a8a5-4da39a072ff4\") " pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825571 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/2a89062e-1da0-4abd-a415-92c6fd9e76f4-signing-key\") pod \"service-ca-9c57cc56f-7jwfc\" (UID: \"2a89062e-1da0-4abd-a415-92c6fd9e76f4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7jwfc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825587 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ebaea921-5d50-4d64-b73e-db0feab77248-config-volume\") pod \"collect-profiles-29415225-grqtz\" (UID: 
\"ebaea921-5d50-4d64-b73e-db0feab77248\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825601 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/88aaa7d3-241a-422a-807b-fb64376527c4-certs\") pod \"machine-config-server-ngjs9\" (UID: \"88aaa7d3-241a-422a-807b-fb64376527c4\") " pod="openshift-machine-config-operator/machine-config-server-ngjs9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825620 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/71735d28-4973-432d-9884-8979622121ec-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-z8tpl\" (UID: \"71735d28-4973-432d-9884-8979622121ec\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825641 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6khn\" (UniqueName: \"kubernetes.io/projected/88aaa7d3-241a-422a-807b-fb64376527c4-kube-api-access-s6khn\") pod \"machine-config-server-ngjs9\" (UID: \"88aaa7d3-241a-422a-807b-fb64376527c4\") " pod="openshift-machine-config-operator/machine-config-server-ngjs9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825661 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9d820c8f-0796-4541-b7f9-bc7927cdbb45-metrics-tls\") pod \"dns-default-rd6kc\" (UID: \"9d820c8f-0796-4541-b7f9-bc7927cdbb45\") " pod="openshift-dns/dns-default-rd6kc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825677 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pf9h9\" (UniqueName: \"kubernetes.io/projected/0d05e5b0-e7d1-4e07-923d-79473a1532d2-kube-api-access-pf9h9\") pod \"olm-operator-6b444d44fb-58gtq\" (UID: \"0d05e5b0-e7d1-4e07-923d-79473a1532d2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825692 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d42b86d2-579c-4fa1-aeb4-6d3d7a47798d-apiservice-cert\") pod \"packageserver-d55dfcdfc-9wtdc\" (UID: \"d42b86d2-579c-4fa1-aeb4-6d3d7a47798d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825728 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d42b86d2-579c-4fa1-aeb4-6d3d7a47798d-webhook-cert\") pod \"packageserver-d55dfcdfc-9wtdc\" (UID: \"d42b86d2-579c-4fa1-aeb4-6d3d7a47798d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825744 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9d820c8f-0796-4541-b7f9-bc7927cdbb45-config-volume\") pod \"dns-default-rd6kc\" (UID: \"9d820c8f-0796-4541-b7f9-bc7927cdbb45\") " pod="openshift-dns/dns-default-rd6kc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825759 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"secret-volume\" (UniqueName: \"kubernetes.io/secret/ebaea921-5d50-4d64-b73e-db0feab77248-secret-volume\") pod \"collect-profiles-29415225-grqtz\" (UID: \"ebaea921-5d50-4d64-b73e-db0feab77248\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825774 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kkww\" (UniqueName: \"kubernetes.io/projected/2a89062e-1da0-4abd-a415-92c6fd9e76f4-kube-api-access-9kkww\") pod \"service-ca-9c57cc56f-7jwfc\" (UID: \"2a89062e-1da0-4abd-a415-92c6fd9e76f4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7jwfc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825795 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.825809 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0100b9e7-bc1f-4ada-ac18-11bba1edd54b-srv-cert\") pod \"catalog-operator-68c6474976-wvkz9\" (UID: \"0100b9e7-bc1f-4ada-ac18-11bba1edd54b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.826523 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-csi-data-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.828556 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0100b9e7-bc1f-4ada-ac18-11bba1edd54b-srv-cert\") pod \"catalog-operator-68c6474976-wvkz9\" (UID: \"0100b9e7-bc1f-4ada-ac18-11bba1edd54b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.828880 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-plugins-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.829004 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-socket-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.829818 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-registration-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.831003 4742 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/2a89062e-1da0-4abd-a415-92c6fd9e76f4-signing-cabundle\") pod \"service-ca-9c57cc56f-7jwfc\" (UID: \"2a89062e-1da0-4abd-a415-92c6fd9e76f4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7jwfc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.831631 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/71735d28-4973-432d-9884-8979622121ec-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-z8tpl\" (UID: \"71735d28-4973-432d-9884-8979622121ec\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.831776 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9d820c8f-0796-4541-b7f9-bc7927cdbb45-config-volume\") pod \"dns-default-rd6kc\" (UID: \"9d820c8f-0796-4541-b7f9-bc7927cdbb45\") " pod="openshift-dns/dns-default-rd6kc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.831939 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d42b86d2-579c-4fa1-aeb4-6d3d7a47798d-tmpfs\") pod \"packageserver-d55dfcdfc-9wtdc\" (UID: \"d42b86d2-579c-4fa1-aeb4-6d3d7a47798d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.832152 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21-config\") pod \"service-ca-operator-777779d784-bxxl2\" (UID: \"3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.832622 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e383eaae-c654-4e64-be23-cb7a9cef6df7-mountpoint-dir\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.832843 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/88aaa7d3-241a-422a-807b-fb64376527c4-certs\") pod \"machine-config-server-ngjs9\" (UID: \"88aaa7d3-241a-422a-807b-fb64376527c4\") " pod="openshift-machine-config-operator/machine-config-server-ngjs9" Dec 05 05:54:21 crc kubenswrapper[4742]: E1205 05:54:21.833039 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:22.333022433 +0000 UTC m=+138.245157565 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.833669 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/997634d0-c379-4978-a8a5-4da39a072ff4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-dfmr9\" (UID: \"997634d0-c379-4978-a8a5-4da39a072ff4\") " pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.834407 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/88aaa7d3-241a-422a-807b-fb64376527c4-node-bootstrap-token\") pod \"machine-config-server-ngjs9\" (UID: \"88aaa7d3-241a-422a-807b-fb64376527c4\") " pod="openshift-machine-config-operator/machine-config-server-ngjs9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.834573 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0d05e5b0-e7d1-4e07-923d-79473a1532d2-profile-collector-cert\") pod \"olm-operator-6b444d44fb-58gtq\" (UID: \"0d05e5b0-e7d1-4e07-923d-79473a1532d2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.834896 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/930e305a-35d6-4053-8064-58fb2662d8b0-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-fpfzc\" (UID: \"930e305a-35d6-4053-8064-58fb2662d8b0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fpfzc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.835815 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ebaea921-5d50-4d64-b73e-db0feab77248-config-volume\") pod \"collect-profiles-29415225-grqtz\" (UID: \"ebaea921-5d50-4d64-b73e-db0feab77248\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.835822 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0100b9e7-bc1f-4ada-ac18-11bba1edd54b-profile-collector-cert\") pod \"catalog-operator-68c6474976-wvkz9\" (UID: \"0100b9e7-bc1f-4ada-ac18-11bba1edd54b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.837967 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/997634d0-c379-4978-a8a5-4da39a072ff4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-dfmr9\" (UID: \"997634d0-c379-4978-a8a5-4da39a072ff4\") " pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.840030 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21-serving-cert\") pod \"service-ca-operator-777779d784-bxxl2\" (UID: \"3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.840032 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cca75767-0e7f-42b7-8acc-fdd795b5c30e-cert\") pod \"ingress-canary-qhd4g\" (UID: \"cca75767-0e7f-42b7-8acc-fdd795b5c30e\") " pod="openshift-ingress-canary/ingress-canary-qhd4g" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.840279 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ebaea921-5d50-4d64-b73e-db0feab77248-secret-volume\") pod \"collect-profiles-29415225-grqtz\" (UID: \"ebaea921-5d50-4d64-b73e-db0feab77248\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.840685 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d42b86d2-579c-4fa1-aeb4-6d3d7a47798d-apiservice-cert\") pod \"packageserver-d55dfcdfc-9wtdc\" (UID: \"d42b86d2-579c-4fa1-aeb4-6d3d7a47798d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.841101 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/2a89062e-1da0-4abd-a415-92c6fd9e76f4-signing-key\") pod \"service-ca-9c57cc56f-7jwfc\" (UID: \"2a89062e-1da0-4abd-a415-92c6fd9e76f4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7jwfc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.841406 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9d820c8f-0796-4541-b7f9-bc7927cdbb45-metrics-tls\") pod \"dns-default-rd6kc\" (UID: \"9d820c8f-0796-4541-b7f9-bc7927cdbb45\") " pod="openshift-dns/dns-default-rd6kc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.843924 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/956af782-f4c5-4000-9fdb-2693248d5b52-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-hxw88\" (UID: \"956af782-f4c5-4000-9fdb-2693248d5b52\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.856493 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rtbh\" (UniqueName: \"kubernetes.io/projected/afacd0cf-c997-4688-bc5c-17c8f729f9c9-kube-api-access-4rtbh\") pod \"cluster-image-registry-operator-dc59b4c8b-k5bqg\" (UID: \"afacd0cf-c997-4688-bc5c-17c8f729f9c9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.856883 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0d05e5b0-e7d1-4e07-923d-79473a1532d2-srv-cert\") pod \"olm-operator-6b444d44fb-58gtq\" (UID: \"0d05e5b0-e7d1-4e07-923d-79473a1532d2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.862288 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d42b86d2-579c-4fa1-aeb4-6d3d7a47798d-webhook-cert\") pod \"packageserver-d55dfcdfc-9wtdc\" (UID: \"d42b86d2-579c-4fa1-aeb4-6d3d7a47798d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.874108 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlm9l\" (UniqueName: \"kubernetes.io/projected/eef2f79c-627c-4bd0-829a-8c16b1f85143-kube-api-access-zlm9l\") pod \"dns-operator-744455d44c-xqfl8\" (UID: \"eef2f79c-627c-4bd0-829a-8c16b1f85143\") " pod="openshift-dns-operator/dns-operator-744455d44c-xqfl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.922754 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6acfde43-1835-4664-9b34-bacd8d98a715-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-x9xsd\" (UID: \"6acfde43-1835-4664-9b34-bacd8d98a715\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.923987 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hw5cq\" (UniqueName: \"kubernetes.io/projected/ae2344f6-2b2e-4071-a215-77b7513f3138-kube-api-access-hw5cq\") pod \"machine-config-controller-84d6567774-xmjng\" (UID: \"ae2344f6-2b2e-4071-a215-77b7513f3138\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.927507 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:21 crc kubenswrapper[4742]: E1205 05:54:21.927921 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:22.427908064 +0000 UTC m=+138.340043126 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.938026 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-xqfl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.947298 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.948231 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8r8c\" (UniqueName: \"kubernetes.io/projected/0f6dbf90-5722-4c03-b815-aa25831f6942-kube-api-access-b8r8c\") pod \"etcd-operator-b45778765-2cttt\" (UID: \"0f6dbf90-5722-4c03-b815-aa25831f6942\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.955485 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.961600 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c932a965-637c-4db4-8dc4-b458856a4275-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mztl8\" (UID: \"c932a965-637c-4db4-8dc4-b458856a4275\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.961991 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rsr6b" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.975004 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28kw5\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-kube-api-access-28kw5\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.982273 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.989665 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" Dec 05 05:54:21 crc kubenswrapper[4742]: I1205 05:54:21.992813 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fvzr\" (UniqueName: \"kubernetes.io/projected/e6e17c17-82b4-489b-a8c3-05b44b99e427-kube-api-access-8fvzr\") pod \"machine-config-operator-74547568cd-24g5k\" (UID: \"e6e17c17-82b4-489b-a8c3-05b44b99e427\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.005734 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.013693 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.017701 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.018020 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqlmg\" (UniqueName: \"kubernetes.io/projected/4eedeb03-3593-40b4-954b-76a312b87bbf-kube-api-access-gqlmg\") pod \"openshift-config-operator-7777fb866f-flkxk\" (UID: \"4eedeb03-3593-40b4-954b-76a312b87bbf\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.029233 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.029911 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-778cz"] Dec 05 05:54:22 crc kubenswrapper[4742]: E1205 05:54:22.030522 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:22.530491267 +0000 UTC m=+138.442626329 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.041166 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-79dwh"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.041210 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-d6dsw"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.042951 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.076866 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2sbt\" (UniqueName: \"kubernetes.io/projected/0f7b345c-76d4-4f77-9b9d-7a9678976492-kube-api-access-w2sbt\") pod \"migrator-59844c95c7-7j9tp\" (UID: \"0f7b345c-76d4-4f77-9b9d-7a9678976492\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7j9tp" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.092276 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-45686"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.106432 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vfstf"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.110231 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n"] Dec 05 
05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.113868 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxmcm\" (UniqueName: \"kubernetes.io/projected/f2d129d3-f117-492c-a680-a03e1ca560e1-kube-api-access-zxmcm\") pod \"router-default-5444994796-nfv6w\" (UID: \"f2d129d3-f117-492c-a680-a03e1ca560e1\") " pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.123751 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-gn9cx"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.130834 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:22 crc kubenswrapper[4742]: E1205 05:54:22.130922 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:22.630903617 +0000 UTC m=+138.543038679 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.131166 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:22 crc kubenswrapper[4742]: E1205 05:54:22.131410 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:22.631402684 +0000 UTC m=+138.543537746 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.133049 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-bound-sa-token\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.136001 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-4ndvx\" (UID: \"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.137159 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9l85h\" (UniqueName: \"kubernetes.io/projected/ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c-kube-api-access-9l85h\") pod \"ingress-operator-5b745b69d9-4ndvx\" (UID: \"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" Dec 05 05:54:22 crc kubenswrapper[4742]: W1205 05:54:22.145426 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddf7f2c2a_0105_489e_9087_3fbf406856a9.slice/crio-233204082e7a476ca0d8be0c285da32a2e3650f4b5371d69d337c0e50452d322 WatchSource:0}: Error finding container 233204082e7a476ca0d8be0c285da32a2e3650f4b5371d69d337c0e50452d322: Status 404 returned error can't find the container with id 233204082e7a476ca0d8be0c285da32a2e3650f4b5371d69d337c0e50452d322 Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.153051 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w25rr\" (UniqueName: \"kubernetes.io/projected/930e305a-35d6-4053-8064-58fb2662d8b0-kube-api-access-w25rr\") pod \"multus-admission-controller-857f4d67dd-fpfzc\" (UID: \"930e305a-35d6-4053-8064-58fb2662d8b0\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-fpfzc" Dec 05 05:54:22 crc kubenswrapper[4742]: W1205 05:54:22.153183 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51f64621_97df_411c_bb21_c24a7c2976be.slice/crio-6e7dc83c704db01324b66e54b7eb41d412c23b8a5949423ed7871ac2d514af7e WatchSource:0}: Error finding container 6e7dc83c704db01324b66e54b7eb41d412c23b8a5949423ed7871ac2d514af7e: Status 404 returned error can't find the container with id 6e7dc83c704db01324b66e54b7eb41d412c23b8a5949423ed7871ac2d514af7e Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.153356 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.168383 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.170600 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.179370 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mggvj\" (UniqueName: \"kubernetes.io/projected/cca75767-0e7f-42b7-8acc-fdd795b5c30e-kube-api-access-mggvj\") pod \"ingress-canary-qhd4g\" (UID: \"cca75767-0e7f-42b7-8acc-fdd795b5c30e\") " pod="openshift-ingress-canary/ingress-canary-qhd4g" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.189890 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-xqfl8"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.203143 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlktb\" (UniqueName: \"kubernetes.io/projected/0100b9e7-bc1f-4ada-ac18-11bba1edd54b-kube-api-access-tlktb\") pod \"catalog-operator-68c6474976-wvkz9\" (UID: \"0100b9e7-bc1f-4ada-ac18-11bba1edd54b\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.210079 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxvxm\" (UniqueName: \"kubernetes.io/projected/d42b86d2-579c-4fa1-aeb4-6d3d7a47798d-kube-api-access-wxvxm\") pod \"packageserver-d55dfcdfc-9wtdc\" (UID: \"d42b86d2-579c-4fa1-aeb4-6d3d7a47798d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.216548 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-zk4vj"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.217927 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.222333 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.226758 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.227948 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.232296 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfc7h\" (UniqueName: \"kubernetes.io/projected/9d820c8f-0796-4541-b7f9-bc7927cdbb45-kube-api-access-tfc7h\") pod \"dns-default-rd6kc\" (UID: \"9d820c8f-0796-4541-b7f9-bc7927cdbb45\") " pod="openshift-dns/dns-default-rd6kc" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.233483 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:22 crc kubenswrapper[4742]: E1205 05:54:22.233631 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:22.733611634 +0000 UTC m=+138.645746696 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.233774 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:22 crc kubenswrapper[4742]: E1205 05:54:22.234201 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:22.734190194 +0000 UTC m=+138.646325256 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.268873 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6khn\" (UniqueName: \"kubernetes.io/projected/88aaa7d3-241a-422a-807b-fb64376527c4-kube-api-access-s6khn\") pod \"machine-config-server-ngjs9\" (UID: \"88aaa7d3-241a-422a-807b-fb64376527c4\") " pod="openshift-machine-config-operator/machine-config-server-ngjs9" Dec 05 05:54:22 crc kubenswrapper[4742]: W1205 05:54:22.278067 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod956af782_f4c5_4000_9fdb_2693248d5b52.slice/crio-6b29f3cf5ad2f02d93fa86ae54a0107c53c6ea2b7df69048de123b8f6091d7d4 WatchSource:0}: Error finding container 6b29f3cf5ad2f02d93fa86ae54a0107c53c6ea2b7df69048de123b8f6091d7d4: Status 404 returned error can't find the container with id 6b29f3cf5ad2f02d93fa86ae54a0107c53c6ea2b7df69048de123b8f6091d7d4 Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.280362 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28klv\" (UniqueName: \"kubernetes.io/projected/e383eaae-c654-4e64-be23-cb7a9cef6df7-kube-api-access-28klv\") pod \"csi-hostpathplugin-p8pxx\" (UID: \"e383eaae-c654-4e64-be23-cb7a9cef6df7\") " pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.296764 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7j9tp" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.298525 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kl4tp\" (UniqueName: \"kubernetes.io/projected/71735d28-4973-432d-9884-8979622121ec-kube-api-access-kl4tp\") pod \"package-server-manager-789f6589d5-z8tpl\" (UID: \"71735d28-4973-432d-9884-8979622121ec\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.315945 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgbk5\" (UniqueName: \"kubernetes.io/projected/ebaea921-5d50-4d64-b73e-db0feab77248-kube-api-access-hgbk5\") pod \"collect-profiles-29415225-grqtz\" (UID: \"ebaea921-5d50-4d64-b73e-db0feab77248\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.332458 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.336213 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:22 crc kubenswrapper[4742]: E1205 05:54:22.336644 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:22.836624782 +0000 UTC m=+138.748759844 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.338808 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-fpfzc" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.339388 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clrzk\" (UniqueName: \"kubernetes.io/projected/997634d0-c379-4978-a8a5-4da39a072ff4-kube-api-access-clrzk\") pod \"marketplace-operator-79b997595-dfmr9\" (UID: \"997634d0-c379-4978-a8a5-4da39a072ff4\") " pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.351738 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.352874 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ljxw\" (UniqueName: \"kubernetes.io/projected/3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21-kube-api-access-7ljxw\") pod \"service-ca-operator-777779d784-bxxl2\" (UID: \"3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.359840 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.380023 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kkww\" (UniqueName: \"kubernetes.io/projected/2a89062e-1da0-4abd-a415-92c6fd9e76f4-kube-api-access-9kkww\") pod \"service-ca-9c57cc56f-7jwfc\" (UID: \"2a89062e-1da0-4abd-a415-92c6fd9e76f4\") " pod="openshift-service-ca/service-ca-9c57cc56f-7jwfc" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.382399 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.382854 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.391918 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-qhd4g" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.400397 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pf9h9\" (UniqueName: \"kubernetes.io/projected/0d05e5b0-e7d1-4e07-923d-79473a1532d2-kube-api-access-pf9h9\") pod \"olm-operator-6b444d44fb-58gtq\" (UID: \"0d05e5b0-e7d1-4e07-923d-79473a1532d2\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.400520 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-ngjs9" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.411635 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-rd6kc" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.421904 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.433963 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.437322 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:22 crc kubenswrapper[4742]: E1205 05:54:22.437781 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:22.937765946 +0000 UTC m=+138.849901008 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.447626 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.538821 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:22 crc kubenswrapper[4742]: E1205 05:54:22.538985 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:23.038954891 +0000 UTC m=+138.951089953 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.539066 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:22 crc kubenswrapper[4742]: E1205 05:54:22.539456 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:23.039446788 +0000 UTC m=+138.951581890 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.593165 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" event={"ID":"defcf8e8-7650-448f-9950-3434978ee21d","Type":"ContainerStarted","Data":"f750600ca4aaf4d047aa42962c555e43e4e335410822311789adc175f9805fc5"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.602304 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-xqfl8" event={"ID":"eef2f79c-627c-4bd0-829a-8c16b1f85143","Type":"ContainerStarted","Data":"c43f1006d9655f03239304d488e9ec0b4388c0a66739316b602f9b31c9fcd930"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.604577 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" event={"ID":"518731b7-0f61-40b4-ad6c-c49383c0dd5b","Type":"ContainerStarted","Data":"a7d12183802e44fa2e1c1a679a0f2a4a934763f6406648bbe2388809c60eb83d"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.613017 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz" event={"ID":"439d1335-3f44-4f26-ad21-b8580866130c","Type":"ContainerStarted","Data":"c6f36c37f4126d1659e13577529ca9cd0f5b51546c15541a54a1d8f530b3a6e6"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.619963 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" event={"ID":"a302e9ed-44a5-41e8-8e91-c37771dca329","Type":"ContainerStarted","Data":"04c62f9005eb5c892bf941bb9b5a4863a1f2ada64fbe8cc3e2cac8758d9c2aa5"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.620850 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-d6dsw" event={"ID":"df7f2c2a-0105-489e-9087-3fbf406856a9","Type":"ContainerStarted","Data":"233204082e7a476ca0d8be0c285da32a2e3650f4b5371d69d337c0e50452d322"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.625869 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-7jwfc" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.629642 4742 generic.go:334] "Generic (PLEG): container finished" podID="b5c1e336-d85b-42a8-a268-2fed8fe3fe98" containerID="c05053a6b467acd0c87ab16b89201eeaf0477196b86ffa7a714fb81f764fc3dc" exitCode=0 Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.629830 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" event={"ID":"b5c1e336-d85b-42a8-a268-2fed8fe3fe98","Type":"ContainerDied","Data":"c05053a6b467acd0c87ab16b89201eeaf0477196b86ffa7a714fb81f764fc3dc"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.629980 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" event={"ID":"b5c1e336-d85b-42a8-a268-2fed8fe3fe98","Type":"ContainerStarted","Data":"946e30040dd9d8cc745833edf116abe8283ce74cacf56376e8d2513312ffb0bc"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.636206 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88" event={"ID":"956af782-f4c5-4000-9fdb-2693248d5b52","Type":"ContainerStarted","Data":"6b29f3cf5ad2f02d93fa86ae54a0107c53c6ea2b7df69048de123b8f6091d7d4"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.639612 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:22 crc kubenswrapper[4742]: E1205 05:54:22.639790 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:23.139769444 +0000 UTC m=+139.051904506 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.639968 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.640665 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" event={"ID":"51f64621-97df-411c-bb21-c24a7c2976be","Type":"ContainerStarted","Data":"6e7dc83c704db01324b66e54b7eb41d412c23b8a5949423ed7871ac2d514af7e"} Dec 05 05:54:22 crc kubenswrapper[4742]: E1205 05:54:22.640976 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:23.140960145 +0000 UTC m=+139.053095237 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:22 crc kubenswrapper[4742]: W1205 05:54:22.641448 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae2344f6_2b2e_4071_a215_77b7513f3138.slice/crio-ddc9da3972f2791968a7ded731cc89753295c51ca783281d2550859404a3e213 WatchSource:0}: Error finding container ddc9da3972f2791968a7ded731cc89753295c51ca783281d2550859404a3e213: Status 404 returned error can't find the container with id ddc9da3972f2791968a7ded731cc89753295c51ca783281d2550859404a3e213 Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.643496 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" event={"ID":"4b516291-ecb5-48f1-8279-1448a2ad8f03","Type":"ContainerStarted","Data":"ffa6c779ecb4963e4ccfd3879fba0412862068fdaa0237f0dd6b3dd50c85307e"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.643546 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" event={"ID":"4b516291-ecb5-48f1-8279-1448a2ad8f03","Type":"ContainerStarted","Data":"a40c90c1d2ef50ac9f2b0c75391658091ae15aa81afb5df5e37455c506c8d878"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.644897 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.645251 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz" event={"ID":"3d2e8268-b920-41b6-a22d-50dca94f8a10","Type":"ContainerStarted","Data":"5b172c49e99e4f642ae17b59508abf2ff8f085c1d65c6d143a63252ee43efe57"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.645287 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz" event={"ID":"3d2e8268-b920-41b6-a22d-50dca94f8a10","Type":"ContainerStarted","Data":"2bf0f7469fac7825d23c53234bd30efce0b2d162fc59d4dc9c15091cf7fe1e24"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.646266 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" event={"ID":"0c7188d0-4020-4749-8bd6-98b637ce3f3c","Type":"ContainerStarted","Data":"6df29b27f7728e446b6b15cbc3ab227870683efa6441186dce4c1617a0cfb9eb"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.647091 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-gn9cx" event={"ID":"bfef6735-7572-4411-b37d-b194d84534de","Type":"ContainerStarted","Data":"c1fe437686967f42bce24bcff0403b5b7ed8a56425149b2688a797e947d4462f"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.648491 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-778cz" event={"ID":"91c75381-2f50-415e-b5c8-e1261be30bbc","Type":"ContainerStarted","Data":"362a2e6253948e9db73071b23feeb0fc9f46d56586a15bf87a748fbe762f5b36"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.650122 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-45686" event={"ID":"f3d9ea9f-6af6-42ea-9298-2e970da2572e","Type":"ContainerStarted","Data":"fa95dfb41e16808fd01d1ff1998f157e08e570333fbb8c56b82cee460b71975f"} Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.685382 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.708857 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rsr6b"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.750431 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:22 crc kubenswrapper[4742]: E1205 05:54:22.751214 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:23.251195929 +0000 UTC m=+139.163331011 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.808550 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.854301 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:22 crc kubenswrapper[4742]: E1205 05:54:22.854839 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:23.354822107 +0000 UTC m=+139.266957169 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.953190 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-flkxk"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.953551 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.955609 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:22 crc kubenswrapper[4742]: E1205 05:54:22.955920 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:23.455905319 +0000 UTC m=+139.368040381 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.973512 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw"] Dec 05 05:54:22 crc kubenswrapper[4742]: I1205 05:54:22.978446 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd"] Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.016522 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8"] Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.028484 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx"] Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.034812 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-2cttt"] Dec 05 05:54:23 crc kubenswrapper[4742]: W1205 05:54:23.044717 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podafacd0cf_c997_4688_bc5c_17c8f729f9c9.slice/crio-eb99bdc5bded33dbb2f11d9368c63c3cb59cfb200694827c17c82de7b7f2937d WatchSource:0}: Error finding container eb99bdc5bded33dbb2f11d9368c63c3cb59cfb200694827c17c82de7b7f2937d: Status 404 returned error can't find the container with id eb99bdc5bded33dbb2f11d9368c63c3cb59cfb200694827c17c82de7b7f2937d Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.056746 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:23 crc kubenswrapper[4742]: E1205 05:54:23.057117 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:23.557103695 +0000 UTC m=+139.469238757 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.068150 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz"] Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.157717 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:23 crc kubenswrapper[4742]: E1205 05:54:23.158460 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:23.658439956 +0000 UTC m=+139.570575018 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.167974 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-p8pxx"] Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.259802 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:23 crc kubenswrapper[4742]: E1205 05:54:23.260152 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:23.760140719 +0000 UTC m=+139.672275781 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.378790 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:23 crc kubenswrapper[4742]: E1205 05:54:23.379601 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:23.879584866 +0000 UTC m=+139.791719928 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.489354 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:23 crc kubenswrapper[4742]: E1205 05:54:23.489670 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:23.989658485 +0000 UTC m=+139.901793547 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.574009 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dfmr9"] Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.597448 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:23 crc kubenswrapper[4742]: E1205 05:54:23.599100 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:24.09907233 +0000 UTC m=+140.011207392 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.601292 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:23 crc kubenswrapper[4742]: E1205 05:54:23.601802 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:24.101788053 +0000 UTC m=+140.013923115 (durationBeforeRetry 500ms). 
Dec 05 05:54:23 crc kubenswrapper[4742]: E1205 05:54:23.601802 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:24.101788053 +0000 UTC m=+140.013923115 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.628131 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-7jwfc"]
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.678268 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc"]
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.704111 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 05:54:23 crc kubenswrapper[4742]: E1205 05:54:23.704433 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:24.204412277 +0000 UTC m=+140.116547339 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.704605 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf"
Dec 05 05:54:23 crc kubenswrapper[4742]: E1205 05:54:23.704885 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:24.204877823 +0000 UTC m=+140.117012885 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.706210 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd" event={"ID":"6acfde43-1835-4664-9b34-bacd8d98a715","Type":"ContainerStarted","Data":"d062774a688beb50fd6add8efc137c450834c2f447d32e312086350afe7f0a24"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.718918 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" event={"ID":"518731b7-0f61-40b4-ad6c-c49383c0dd5b","Type":"ContainerStarted","Data":"de05bec7a8195bec60db47b2488c59a4a3f1843a8432c6549359b704c6e6ce3e"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.722536 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9"]
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.724131 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz" event={"ID":"439d1335-3f44-4f26-ad21-b8580866130c","Type":"ContainerStarted","Data":"45589838b157e50e0819a72a36dd1228745e984d080d56bdc4e2d72b2727050b"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.725740 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2"]
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.736266 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-gn9cx" event={"ID":"bfef6735-7572-4411-b37d-b194d84534de","Type":"ContainerStarted","Data":"091447273a8c966e9209bb1a5296563d19bdc43e00d51d7c12a185cd9a7b6cbc"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.736803 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-gn9cx"
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.737085 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-7j9tp"]
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.739558 4742 patch_prober.go:28] interesting pod/downloads-7954f5f757-gn9cx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body=
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.739604 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gn9cx" podUID="bfef6735-7572-4411-b37d-b194d84534de" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused"
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.740167 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" event={"ID":"997634d0-c379-4978-a8a5-4da39a072ff4","Type":"ContainerStarted","Data":"0c79ca110f360b0197a2ac7f545dd0a176e83f72484aaac94d2ed1b3aad090ae"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.753329 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" event={"ID":"afacd0cf-c997-4688-bc5c-17c8f729f9c9","Type":"ContainerStarted","Data":"eb99bdc5bded33dbb2f11d9368c63c3cb59cfb200694827c17c82de7b7f2937d"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.755020 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl"]
Dec 05 05:54:23 crc kubenswrapper[4742]: W1205 05:54:23.759098 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a89062e_1da0_4abd_a415_92c6fd9e76f4.slice/crio-18aed96670f3b47a8a80d73b35e68742974dbbf48720a4fc4acb056c6ace866c WatchSource:0}: Error finding container 18aed96670f3b47a8a80d73b35e68742974dbbf48720a4fc4acb056c6ace866c: Status 404 returned error can't find the container with id 18aed96670f3b47a8a80d73b35e68742974dbbf48720a4fc4acb056c6ace866c
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.775402 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-d6dsw" event={"ID":"df7f2c2a-0105-489e-9087-3fbf406856a9","Type":"ContainerStarted","Data":"ba3c5b3779cb629505e8fffa23f62cfd138c6f165cd49cd28650fc5f618af7a9"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.775718 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-d6dsw"
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.781430 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" event={"ID":"e6e17c17-82b4-489b-a8c3-05b44b99e427","Type":"ContainerStarted","Data":"197816b97a6e98c2f48fadea4987499bce674bae0680adee56697746a82ea5d7"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.784906 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" event={"ID":"ae2344f6-2b2e-4071-a215-77b7513f3138","Type":"ContainerStarted","Data":"ddc9da3972f2791968a7ded731cc89753295c51ca783281d2550859404a3e213"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.785197 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-rbzzz" podStartSLOduration=120.785188457 podStartE2EDuration="2m0.785188457s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:23.78468324 +0000 UTC m=+139.696818302" watchObservedRunningTime="2025-12-05 05:54:23.785188457 +0000 UTC m=+139.697323519"
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.794895 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" event={"ID":"e383eaae-c654-4e64-be23-cb7a9cef6df7","Type":"ContainerStarted","Data":"857127a5450ff544517a93e00e856b7bd101376d0a3ab292836c08cfe9645190"}
Dec 05 05:54:23 crc kubenswrapper[4742]: W1205 05:54:23.795520 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd42b86d2_579c_4fa1_aeb4_6d3d7a47798d.slice/crio-798856c7912d3b11f23662f49cf14e321ef76f3411d257e70c9cdf53624155ed WatchSource:0}: Error finding container 798856c7912d3b11f23662f49cf14e321ef76f3411d257e70c9cdf53624155ed: Status 404 returned error can't find the container with id 798856c7912d3b11f23662f49cf14e321ef76f3411d257e70c9cdf53624155ed
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.806431 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 05:54:23 crc kubenswrapper[4742]: E1205 05:54:23.806702 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:24.306684109 +0000 UTC m=+140.218819171 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.806809 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf"
Dec 05 05:54:23 crc kubenswrapper[4742]: E1205 05:54:23.807716 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:24.307709184 +0000 UTC m=+140.219844246 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.814478 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-fpfzc"]
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.815773 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" event={"ID":"0c7188d0-4020-4749-8bd6-98b637ce3f3c","Type":"ContainerStarted","Data":"abe36ba16e7322e6e35e3ab2d4c8d5522eb63369ba15fd346b473da48bf29f06"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.816531 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c"
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.839895 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" event={"ID":"4eedeb03-3593-40b4-954b-76a312b87bbf","Type":"ContainerStarted","Data":"9993c93c686157bdd2d707eecc31acfd8f8666885eb843aca1cc51d6f141009f"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.840876 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" event={"ID":"ebaea921-5d50-4d64-b73e-db0feab77248","Type":"ContainerStarted","Data":"e46cf99be5f2930097fe188fbff2791c2b84b189e451666141baa180b639c672"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.842033 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-778cz" event={"ID":"91c75381-2f50-415e-b5c8-e1261be30bbc","Type":"ContainerStarted","Data":"d17a56b7502b023471f3fb32631c1f94d329b0a14decfdb1f1f2c37bc74d57c9"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.842738 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-rd6kc"]
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.843298 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-nfv6w" event={"ID":"f2d129d3-f117-492c-a680-a03e1ca560e1","Type":"ContainerStarted","Data":"92894e0704c3983dce6d313dcc6e65f8f76dda39ee6a3fbb7b2fb3f0205cb691"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.844099 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw" event={"ID":"1dbf36d7-4088-4d93-90ec-5795e82dbc42","Type":"ContainerStarted","Data":"3f346034ae17632fd2e75100edacb1cc6594d16761ecfe17442de8f71e3ed2b8"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.848976 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-qhd4g"]
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.853755 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rsr6b" event={"ID":"93cbfc1e-ff82-4309-a7a8-dd57f1fc6616","Type":"ContainerStarted","Data":"1c4e7e71f2fb43ec3a6006a8c1e805571acf87a563c26b4f73b0aa99a71b0ea9"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.853880 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-d6dsw"
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.855871 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c"
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.857606 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" event={"ID":"a302e9ed-44a5-41e8-8e91-c37771dca329","Type":"ContainerStarted","Data":"e2b2237464488a3a85672795a12d4413a69b6eee6202ee5db9aa722ebeba7bf0"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.858088 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf"
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.859152 4742 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-vfstf container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused" start-of-body=
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.859190 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" podUID="a302e9ed-44a5-41e8-8e91-c37771dca329" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.13:8443/healthz\": dial tcp 10.217.0.13:8443: connect: connection refused"
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.877836 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" event={"ID":"51f64621-97df-411c-bb21-c24a7c2976be","Type":"ContainerStarted","Data":"8eeb3ea4a0c7da56d21a7bdf563dab61c64c1842335abcf4169711f0e2b95147"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.884929 4742 generic.go:334] "Generic (PLEG): container finished" podID="defcf8e8-7650-448f-9950-3434978ee21d" containerID="e6fc1c39f8a0694011c139532a9ed5becd4332981a60231a9b705bbc8aca62f5" exitCode=0
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.884994 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" event={"ID":"defcf8e8-7650-448f-9950-3434978ee21d","Type":"ContainerDied","Data":"e6fc1c39f8a0694011c139532a9ed5becd4332981a60231a9b705bbc8aca62f5"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.887611 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" event={"ID":"0f6dbf90-5722-4c03-b815-aa25831f6942","Type":"ContainerStarted","Data":"7baf724b943d55439280ff56550eb19f3b54dd00e62951122be7972834aae30a"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.888732 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-ngjs9" event={"ID":"88aaa7d3-241a-422a-807b-fb64376527c4","Type":"ContainerStarted","Data":"b01eefc4ee703ff90911e3844478c6cc5c6ece1705984c0c2a1c93d74f65610d"}
Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.890517 4742 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-45686" event={"ID":"f3d9ea9f-6af6-42ea-9298-2e970da2572e","Type":"ContainerStarted","Data":"02d31a7fe7e634d441cbc67e03a1804c2214ff3263bbf926965bf63e20f6b0cc"} Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.892642 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd" event={"ID":"57ef3310-7e27-4216-9fa1-ccaa1c61cd4a","Type":"ContainerStarted","Data":"bf8a5675df9ae6c12232c046ba25d3509b35f0e6986e10600eb9c1fede0daa54"} Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.894177 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8" event={"ID":"c932a965-637c-4db4-8dc4-b458856a4275","Type":"ContainerStarted","Data":"f3cf240da6e3066d06d3c59c80655918d548b13211accf7c3bf41dbe6460fa74"} Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.896523 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" event={"ID":"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c","Type":"ContainerStarted","Data":"a03d2d9087b485d1ca3680a999b55fcd088914067677bee4bdf3b5555282daa5"} Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.898993 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" event={"ID":"4b516291-ecb5-48f1-8279-1448a2ad8f03","Type":"ContainerStarted","Data":"31d0f3eba1783ba7474b9c6156b4965162c507a49e3d076cbcb7d9b179817245"} Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.908039 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:23 crc kubenswrapper[4742]: E1205 05:54:23.908201 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:24.408182145 +0000 UTC m=+140.320317207 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:23 crc kubenswrapper[4742]: I1205 05:54:23.909500 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:23 crc kubenswrapper[4742]: E1205 05:54:23.911134 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:24.411123145 +0000 UTC m=+140.323258207 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.004104 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" podStartSLOduration=120.004089671 podStartE2EDuration="2m0.004089671s" podCreationTimestamp="2025-12-05 05:52:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:23.953150356 +0000 UTC m=+139.865285418" watchObservedRunningTime="2025-12-05 05:54:24.004089671 +0000 UTC m=+139.916224733" Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.004783 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-d6dsw" podStartSLOduration=121.004776984 podStartE2EDuration="2m1.004776984s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:23.999458273 +0000 UTC m=+139.911593345" watchObservedRunningTime="2025-12-05 05:54:24.004776984 +0000 UTC m=+139.916912046" Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.007715 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq"] Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.011822 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:24 crc kubenswrapper[4742]: 
E1205 05:54:24.012719 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:24.512701644 +0000 UTC m=+140.424836696 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.070266 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-778cz" podStartSLOduration=121.070247194 podStartE2EDuration="2m1.070247194s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:24.067969316 +0000 UTC m=+139.980104378" watchObservedRunningTime="2025-12-05 05:54:24.070247194 +0000 UTC m=+139.982382276" Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.113477 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:24 crc kubenswrapper[4742]: E1205 05:54:24.113890 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:24.613870699 +0000 UTC m=+140.526005811 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.146653 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-s8jnz" podStartSLOduration=121.146630535 podStartE2EDuration="2m1.146630535s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:24.108142374 +0000 UTC m=+140.020277436" watchObservedRunningTime="2025-12-05 05:54:24.146630535 +0000 UTC m=+140.058765607" Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.148327 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-79dwh" podStartSLOduration=121.148312492 podStartE2EDuration="2m1.148312492s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:24.146068355 +0000 UTC m=+140.058203417" watchObservedRunningTime="2025-12-05 05:54:24.148312492 +0000 UTC m=+140.060447574" Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.186640 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-gn9cx" podStartSLOduration=121.186618476 podStartE2EDuration="2m1.186618476s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:24.184456403 +0000 UTC m=+140.096591495" watchObservedRunningTime="2025-12-05 05:54:24.186618476 +0000 UTC m=+140.098753538" Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.215089 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:24 crc kubenswrapper[4742]: E1205 05:54:24.215377 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:24.715345084 +0000 UTC m=+140.627480166 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.226859 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" podStartSLOduration=121.226839086 podStartE2EDuration="2m1.226839086s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:24.22636946 +0000 UTC m=+140.138504522" watchObservedRunningTime="2025-12-05 05:54:24.226839086 +0000 UTC m=+140.138974148" Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.317938 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:24 crc kubenswrapper[4742]: E1205 05:54:24.318663 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:24.818640642 +0000 UTC m=+140.730775724 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.419383 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:24 crc kubenswrapper[4742]: E1205 05:54:24.422315 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:24.92225312 +0000 UTC m=+140.834388182 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.524075 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:24 crc kubenswrapper[4742]: E1205 05:54:24.524554 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:25.024538073 +0000 UTC m=+140.936673125 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.625804 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:24 crc kubenswrapper[4742]: E1205 05:54:24.625908 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:25.125880524 +0000 UTC m=+141.038015596 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.626869 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:24 crc kubenswrapper[4742]: E1205 05:54:24.627908 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:25.127894162 +0000 UTC m=+141.040029224 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.728470 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:24 crc kubenswrapper[4742]: E1205 05:54:24.728784 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:25.228760507 +0000 UTC m=+141.140895569 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.744492 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:24 crc kubenswrapper[4742]: E1205 05:54:24.747135 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:25.247118692 +0000 UTC m=+141.159253754 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.851014 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:24 crc kubenswrapper[4742]: E1205 05:54:24.851487 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:25.351471506 +0000 UTC m=+141.263606568 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.952606 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:24 crc kubenswrapper[4742]: E1205 05:54:24.953026 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:25.453006583 +0000 UTC m=+141.365141655 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.954082 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" event={"ID":"997634d0-c379-4978-a8a5-4da39a072ff4","Type":"ContainerStarted","Data":"b4d563ffc52fc72b39f577ec801c1678145af630b95fecd42e8816988ba906fa"} Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.954419 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.956614 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" event={"ID":"ebaea921-5d50-4d64-b73e-db0feab77248","Type":"ContainerStarted","Data":"f852d749ea8342bd3d3752c9c9b2516fd2c9158e32d518022257f1233a574cd3"} Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.962267 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw" event={"ID":"1dbf36d7-4088-4d93-90ec-5795e82dbc42","Type":"ContainerStarted","Data":"a057cc8ae025c3670bad68de96a1ab67ffff43870ec95c9b28af0ab8cbb4e856"} Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.970177 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-ngjs9" event={"ID":"88aaa7d3-241a-422a-807b-fb64376527c4","Type":"ContainerStarted","Data":"13a33f332c5390b57737ea313575ca56f93a5d3ad61009d5d837a43bcd196401"} Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.981413 4742 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-dfmr9 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get 
\"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body= Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.981482 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" podUID="997634d0-c379-4978-a8a5-4da39a072ff4" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" Dec 05 05:54:24 crc kubenswrapper[4742]: I1205 05:54:24.997463 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" event={"ID":"b5c1e336-d85b-42a8-a268-2fed8fe3fe98","Type":"ContainerStarted","Data":"1e10bbde3fb1ae0f93993af32ca3c17af1dcc2f2f299e757967de675b91cd8ec"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.008370 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" podStartSLOduration=122.008352378 podStartE2EDuration="2m2.008352378s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:24.981516394 +0000 UTC m=+140.893651466" watchObservedRunningTime="2025-12-05 05:54:25.008352378 +0000 UTC m=+140.920487440" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.010863 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" event={"ID":"0d05e5b0-e7d1-4e07-923d-79473a1532d2","Type":"ContainerStarted","Data":"4f528a753983af98ae4acb6803343de72a9000c2eabdf0d7be5e199431bef3d1"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.046411 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" podStartSLOduration=122.046394263 podStartE2EDuration="2m2.046394263s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:25.04570906 +0000 UTC m=+140.957844122" watchObservedRunningTime="2025-12-05 05:54:25.046394263 +0000 UTC m=+140.958529335" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.046720 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-ngjs9" podStartSLOduration=6.046716704 podStartE2EDuration="6.046716704s" podCreationTimestamp="2025-12-05 05:54:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:25.010898104 +0000 UTC m=+140.923033166" watchObservedRunningTime="2025-12-05 05:54:25.046716704 +0000 UTC m=+140.958851766" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.049045 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd" event={"ID":"6acfde43-1835-4664-9b34-bacd8d98a715","Type":"ContainerStarted","Data":"8fea13a667b66af7582206e54b7deeb903496cdad927a1752ce2c1d3d3d0caab"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.056914 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:25 crc kubenswrapper[4742]: E1205 05:54:25.057997 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:25.557981768 +0000 UTC m=+141.470116830 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.087232 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88" event={"ID":"956af782-f4c5-4000-9fdb-2693248d5b52","Type":"ContainerStarted","Data":"6ac8eaaf758ee329e0779d697a5da90168f7658e9d286539d5908d80f0373d1c"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.110515 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-vntjw" podStartSLOduration=122.110495956 podStartE2EDuration="2m2.110495956s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:25.075724472 +0000 UTC m=+140.987859554" watchObservedRunningTime="2025-12-05 05:54:25.110495956 +0000 UTC m=+141.022631028" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.118669 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-qhd4g" event={"ID":"cca75767-0e7f-42b7-8acc-fdd795b5c30e","Type":"ContainerStarted","Data":"a8c1527ea24e1bbc9561700245084c88aeb89986fa63bc0919ed774c72d26813"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.126285 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-rd6kc" event={"ID":"9d820c8f-0796-4541-b7f9-bc7927cdbb45","Type":"ContainerStarted","Data":"dedb128a3de22e240495a28e80230ee18c1b778f93c767cd18b8c594084db2f3"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.143492 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-hxw88" podStartSLOduration=122.143433757 podStartE2EDuration="2m2.143433757s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:25.143365015 +0000 UTC m=+141.055500117" watchObservedRunningTime="2025-12-05 05:54:25.143433757 +0000 UTC m=+141.055568829" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.145473 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x9xsd" podStartSLOduration=122.145465027 
podStartE2EDuration="2m2.145465027s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:25.112434282 +0000 UTC m=+141.024569344" watchObservedRunningTime="2025-12-05 05:54:25.145465027 +0000 UTC m=+141.057600099" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.159852 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:25 crc kubenswrapper[4742]: E1205 05:54:25.161451 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:25.66143007 +0000 UTC m=+141.573565202 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.227478 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-xqfl8" event={"ID":"eef2f79c-627c-4bd0-829a-8c16b1f85143","Type":"ContainerStarted","Data":"45b5772b7bafcc8abbafc01c651565520fbd03afdc7d9e73d845973777dd4fca"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.243527 4742 generic.go:334] "Generic (PLEG): container finished" podID="4eedeb03-3593-40b4-954b-76a312b87bbf" containerID="aed728d03eff193c32782149b3dfed6746879797a34d20c13aa05dbe0075ad57" exitCode=0 Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.243897 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" event={"ID":"4eedeb03-3593-40b4-954b-76a312b87bbf","Type":"ContainerDied","Data":"aed728d03eff193c32782149b3dfed6746879797a34d20c13aa05dbe0075ad57"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.257886 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" event={"ID":"afacd0cf-c997-4688-bc5c-17c8f729f9c9","Type":"ContainerStarted","Data":"e791028aaf4a9c9b75b4f9425ea201089c65501830deef1d9da7aa8f90225820"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.264539 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:25 crc kubenswrapper[4742]: E1205 05:54:25.264749 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:25.764719298 +0000 UTC m=+141.676854380 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.264870 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:25 crc kubenswrapper[4742]: E1205 05:54:25.265346 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:25.765337449 +0000 UTC m=+141.677472511 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.269161 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7j9tp" event={"ID":"0f7b345c-76d4-4f77-9b9d-7a9678976492","Type":"ContainerStarted","Data":"cbec2c5d9bc724e6f8ab5d8cfbadf524f4bebd8370dcbead388320af25d8d9e5"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.285738 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-7jwfc" event={"ID":"2a89062e-1da0-4abd-a415-92c6fd9e76f4","Type":"ContainerStarted","Data":"18aed96670f3b47a8a80d73b35e68742974dbbf48720a4fc4acb056c6ace866c"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.304501 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2" event={"ID":"3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21","Type":"ContainerStarted","Data":"6210129dd8c2b954e5da66f5a49c55194cc4244ebe8fd0a0ac52829c657d718f"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.344602 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" event={"ID":"0100b9e7-bc1f-4ada-ac18-11bba1edd54b","Type":"ContainerStarted","Data":"302feb3b5c1bc5be959b5553bafe47e6e8e4d65f9f7a951ad4147e2ab2e06e8e"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.344658 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" 
event={"ID":"0100b9e7-bc1f-4ada-ac18-11bba1edd54b","Type":"ContainerStarted","Data":"523126cd70c9893b10900a08e3ba579639d811aa046133f17990e973ebb50a4c"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.345178 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.366140 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.366539 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" event={"ID":"e6e17c17-82b4-489b-a8c3-05b44b99e427","Type":"ContainerStarted","Data":"a96e6cb9d8b83c41e466a956371215cba5133c59a91da7481c3866e74e869014"} Dec 05 05:54:25 crc kubenswrapper[4742]: E1205 05:54:25.366772 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:25.866756252 +0000 UTC m=+141.778891314 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.366926 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:25 crc kubenswrapper[4742]: E1205 05:54:25.367178 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:25.867170726 +0000 UTC m=+141.779305788 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.392110 4742 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-wvkz9 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" start-of-body= Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.392162 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" podUID="0100b9e7-bc1f-4ada-ac18-11bba1edd54b" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.40:8443/healthz\": dial tcp 10.217.0.40:8443: connect: connection refused" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.435530 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl" event={"ID":"71735d28-4973-432d-9884-8979622121ec","Type":"ContainerStarted","Data":"c29dd37cf0710cdd6cb9fae662466168e1115d1c5c560697ffa01029d20ac133"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.468426 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:25 crc kubenswrapper[4742]: E1205 05:54:25.468722 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:25.968706694 +0000 UTC m=+141.880841756 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.478095 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" event={"ID":"d42b86d2-579c-4fa1-aeb4-6d3d7a47798d","Type":"ContainerStarted","Data":"798856c7912d3b11f23662f49cf14e321ef76f3411d257e70c9cdf53624155ed"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.479021 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.508202 4742 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-9wtdc container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" start-of-body= Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.508256 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" podUID="d42b86d2-579c-4fa1-aeb4-6d3d7a47798d" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.509812 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" podStartSLOduration=122.509801653 podStartE2EDuration="2m2.509801653s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:25.509009796 +0000 UTC m=+141.421144878" watchObservedRunningTime="2025-12-05 05:54:25.509801653 +0000 UTC m=+141.421936715" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.510884 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-k5bqg" podStartSLOduration=122.51087693 podStartE2EDuration="2m2.51087693s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:25.433441633 +0000 UTC m=+141.345576705" watchObservedRunningTime="2025-12-05 05:54:25.51087693 +0000 UTC m=+141.423012102" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.564049 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" event={"ID":"518731b7-0f61-40b4-ad6c-c49383c0dd5b","Type":"ContainerStarted","Data":"66256f4b3fe0a8471d604649a0ffa9d6e9384e040ebbf25a95b2d95f11f25fa8"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.587152 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" 
event={"ID":"ae2344f6-2b2e-4071-a215-77b7513f3138","Type":"ContainerStarted","Data":"f7dac5250ac66651b81e47c739bf78c06d1b4bd73943e80e8ba3450d85849e7f"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.633149 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:25 crc kubenswrapper[4742]: E1205 05:54:25.634723 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:26.134692326 +0000 UTC m=+142.046827388 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.663943 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" podStartSLOduration=121.663918351 podStartE2EDuration="2m1.663918351s" podCreationTimestamp="2025-12-05 05:52:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:25.571751742 +0000 UTC m=+141.483886804" watchObservedRunningTime="2025-12-05 05:54:25.663918351 +0000 UTC m=+141.576053413" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.664199 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-zk4vj" podStartSLOduration=122.66419375 podStartE2EDuration="2m2.66419375s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:25.663030601 +0000 UTC m=+141.575165673" watchObservedRunningTime="2025-12-05 05:54:25.66419375 +0000 UTC m=+141.576328822" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.719399 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rsr6b" event={"ID":"93cbfc1e-ff82-4309-a7a8-dd57f1fc6616","Type":"ContainerStarted","Data":"81f8c6776b708056171e3aad3fd4fe8bc2ee3df296614d8f5d4ee2e95511df6a"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.741527 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:25 crc kubenswrapper[4742]: E1205 05:54:25.742352 4742 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:26.242324821 +0000 UTC m=+142.154459883 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.744775 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-nfv6w" event={"ID":"f2d129d3-f117-492c-a680-a03e1ca560e1","Type":"ContainerStarted","Data":"0a7a2a0f0c18bc62a1425174766fd250437908f9522300a958db629288da94ae"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.748191 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rsr6b" podStartSLOduration=122.74817919 podStartE2EDuration="2m2.74817919s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:25.746042867 +0000 UTC m=+141.658177929" watchObservedRunningTime="2025-12-05 05:54:25.74817919 +0000 UTC m=+141.660314262" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.764619 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8" event={"ID":"c932a965-637c-4db4-8dc4-b458856a4275","Type":"ContainerStarted","Data":"3901f3d39dfe061a5643086f7836b3016be51e3ad86e955fce23fc62bd7042c4"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.778479 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fpfzc" event={"ID":"930e305a-35d6-4053-8064-58fb2662d8b0","Type":"ContainerStarted","Data":"e440b06b93d8e5253ce748cf3695a37a2ba3afc69c1ce24c1dddfe183df2c38f"} Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.785006 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.786230 4742 patch_prober.go:28] interesting pod/downloads-7954f5f757-gn9cx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.786279 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gn9cx" podUID="bfef6735-7572-4411-b37d-b194d84534de" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.794337 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.813786 4742 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-nfv6w" podStartSLOduration=122.813766513 podStartE2EDuration="2m2.813766513s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:25.812733768 +0000 UTC m=+141.724868830" watchObservedRunningTime="2025-12-05 05:54:25.813766513 +0000 UTC m=+141.725901575" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.841541 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-45686" podStartSLOduration=122.841522569 podStartE2EDuration="2m2.841522569s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:25.841317992 +0000 UTC m=+141.753453074" watchObservedRunningTime="2025-12-05 05:54:25.841522569 +0000 UTC m=+141.753657641" Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.843526 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:25 crc kubenswrapper[4742]: E1205 05:54:25.858581 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:26.358561779 +0000 UTC m=+142.270696841 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.955279 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:25 crc kubenswrapper[4742]: E1205 05:54:25.955628 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:26.455613924 +0000 UTC m=+142.367748986 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:25 crc kubenswrapper[4742]: I1205 05:54:25.956659 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mztl8" podStartSLOduration=122.956640839 podStartE2EDuration="2m2.956640839s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:25.954782225 +0000 UTC m=+141.866917277" watchObservedRunningTime="2025-12-05 05:54:25.956640839 +0000 UTC m=+141.868775901" Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.030080 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pxlnb" podStartSLOduration=123.030040858 podStartE2EDuration="2m3.030040858s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:26.026373493 +0000 UTC m=+141.938508575" watchObservedRunningTime="2025-12-05 05:54:26.030040858 +0000 UTC m=+141.942175920" Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.063375 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:26 crc kubenswrapper[4742]: E1205 05:54:26.063730 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:26.563718055 +0000 UTC m=+142.475853117 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.166081 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:26 crc kubenswrapper[4742]: E1205 05:54:26.166801 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:26.666779354 +0000 UTC m=+142.578914416 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.178298 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.194156 4742 patch_prober.go:28] interesting pod/router-default-5444994796-nfv6w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:54:26 crc kubenswrapper[4742]: [-]has-synced failed: reason withheld Dec 05 05:54:26 crc kubenswrapper[4742]: [+]process-running ok Dec 05 05:54:26 crc kubenswrapper[4742]: healthz check failed Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.194216 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfv6w" podUID="f2d129d3-f117-492c-a680-a03e1ca560e1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.212094 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.267676 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:26 crc kubenswrapper[4742]: E1205 05:54:26.268290 4742 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:26.76825939 +0000 UTC m=+142.680394452 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.368557 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:26 crc kubenswrapper[4742]: E1205 05:54:26.369186 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:26.869041072 +0000 UTC m=+142.781176134 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.471927 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:26 crc kubenswrapper[4742]: E1205 05:54:26.472688 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:26.972673001 +0000 UTC m=+142.884808063 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.572851 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:26 crc kubenswrapper[4742]: E1205 05:54:26.573074 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:27.073027308 +0000 UTC m=+142.985162370 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.573296 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:26 crc kubenswrapper[4742]: E1205 05:54:26.573605 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:27.073596407 +0000 UTC m=+142.985731459 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.674046 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:26 crc kubenswrapper[4742]: E1205 05:54:26.674354 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:27.174339227 +0000 UTC m=+143.086474289 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.674620 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:26 crc kubenswrapper[4742]: E1205 05:54:26.674962 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:27.174955078 +0000 UTC m=+143.087090140 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.775347 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:26 crc kubenswrapper[4742]: E1205 05:54:26.775631 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:27.275616536 +0000 UTC m=+143.187751598 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.797809 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" event={"ID":"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c","Type":"ContainerStarted","Data":"ad8abb4ce557709bf19c5d2a83593075a3ebd771f56b9aacad0042df353e15cc"} Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.797861 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" event={"ID":"ef9d3f4a-2357-4cf8-94d6-5c9bb31a2e5c","Type":"ContainerStarted","Data":"9aaa41bcef19b05ccfcb4374618b3bef4f5bc20ef5511fbe79a332f5809aba84"} Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.846075 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-4ndvx" podStartSLOduration=123.846038794 podStartE2EDuration="2m3.846038794s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:26.84355921 +0000 UTC m=+142.755694272" watchObservedRunningTime="2025-12-05 05:54:26.846038794 +0000 UTC m=+142.758173856" Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.853581 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd" event={"ID":"57ef3310-7e27-4216-9fa1-ccaa1c61cd4a","Type":"ContainerStarted","Data":"4e13e378f46374fbb74d621bc2108bd95c3f4b140aeae01a6d9da875b466f0e7"} Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.853636 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd" 
event={"ID":"57ef3310-7e27-4216-9fa1-ccaa1c61cd4a","Type":"ContainerStarted","Data":"c103fcbadfa1adac9c6a00da07ed1a53aa6b135c4ac221842162a9bf71074f81"} Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.885693 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:26 crc kubenswrapper[4742]: E1205 05:54:26.887545 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:27.387529717 +0000 UTC m=+143.299664779 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.901240 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" event={"ID":"b5c1e336-d85b-42a8-a268-2fed8fe3fe98","Type":"ContainerStarted","Data":"f44576306963054e353fa3fb7092ec44401191a32f399e713fc62bf2111025b4"} Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.928684 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" event={"ID":"e383eaae-c654-4e64-be23-cb7a9cef6df7","Type":"ContainerStarted","Data":"634357b7db08c009456a4ffc746aab573a03a01355cc3f7aa615a8c09b775e13"} Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.945367 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" podStartSLOduration=123.945352106 podStartE2EDuration="2m3.945352106s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:26.944866589 +0000 UTC m=+142.857001671" watchObservedRunningTime="2025-12-05 05:54:26.945352106 +0000 UTC m=+142.857487158" Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.946793 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-cqzqd" podStartSLOduration=123.946788645 podStartE2EDuration="2m3.946788645s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:26.906431241 +0000 UTC m=+142.818566313" watchObservedRunningTime="2025-12-05 05:54:26.946788645 +0000 UTC m=+142.858923707" Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.961334 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl" 
event={"ID":"71735d28-4973-432d-9884-8979622121ec","Type":"ContainerStarted","Data":"aab8bacb67b332838dcb6a3ffa805c9faf85f50bfecc3ad06adfda751279fc23"} Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.961382 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl" event={"ID":"71735d28-4973-432d-9884-8979622121ec","Type":"ContainerStarted","Data":"71847e0f70f4fc430b05ce6fa7dc30d1131d92b4f15f1c1c0dab24cb26e6fc73"} Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.962184 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl" Dec 05 05:54:26 crc kubenswrapper[4742]: I1205 05:54:26.987700 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:26 crc kubenswrapper[4742]: E1205 05:54:26.988769 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:27.488748414 +0000 UTC m=+143.400883476 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.001344 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" event={"ID":"e6e17c17-82b4-489b-a8c3-05b44b99e427","Type":"ContainerStarted","Data":"cd26b9855cc1756680434e81fc68730ec4baa55ed48c7427061b076f2aabece8"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.038256 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fpfzc" event={"ID":"930e305a-35d6-4053-8064-58fb2662d8b0","Type":"ContainerStarted","Data":"94c9598a9c6f5a390f98db22cc820d45a6200102b3c4dbf57a550d9058550c4d"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.040452 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-24g5k" podStartSLOduration=124.040440624 podStartE2EDuration="2m4.040440624s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:27.039376178 +0000 UTC m=+142.951511240" watchObservedRunningTime="2025-12-05 05:54:27.040440624 +0000 UTC m=+142.952575686" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.040529 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl" podStartSLOduration=124.040526177 podStartE2EDuration="2m4.040526177s" 
podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:27.002180291 +0000 UTC m=+142.914315353" watchObservedRunningTime="2025-12-05 05:54:27.040526177 +0000 UTC m=+142.952661239" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.066326 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-rd6kc" event={"ID":"9d820c8f-0796-4541-b7f9-bc7927cdbb45","Type":"ContainerStarted","Data":"63eacba1b46e31dda05305442c3dee81f2ef96216dd620e198f501077daaad0f"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.066376 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-rd6kc" event={"ID":"9d820c8f-0796-4541-b7f9-bc7927cdbb45","Type":"ContainerStarted","Data":"7d1251c10cfa9d7bd9399dcbae859582f45ced8b3d8ef099a2f45e2a0175af30"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.066932 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-rd6kc" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.069326 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-fpfzc" podStartSLOduration=124.069315797 podStartE2EDuration="2m4.069315797s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:27.067235856 +0000 UTC m=+142.979370908" watchObservedRunningTime="2025-12-05 05:54:27.069315797 +0000 UTC m=+142.981450859" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.089647 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:27 crc kubenswrapper[4742]: E1205 05:54:27.091236 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:27.591225583 +0000 UTC m=+143.503360645 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.104101 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2" event={"ID":"3cf6ebcf-1dcf-4bcf-a2bb-80b4b7d7cd21","Type":"ContainerStarted","Data":"733bfe965702461d159257ae529545936dc44eefb1f23d72e2c4bdb62dcf4236"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.106177 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-rd6kc" podStartSLOduration=8.106163692 podStartE2EDuration="8.106163692s" podCreationTimestamp="2025-12-05 05:54:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:27.104452294 +0000 UTC m=+143.016587366" watchObservedRunningTime="2025-12-05 05:54:27.106163692 +0000 UTC m=+143.018298754" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.155156 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" event={"ID":"4eedeb03-3593-40b4-954b-76a312b87bbf","Type":"ContainerStarted","Data":"a6245e7547da7339b60e5493fca900d1463abf51eb1a47f74e67eb207619b645"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.155211 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.182359 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7j9tp" event={"ID":"0f7b345c-76d4-4f77-9b9d-7a9678976492","Type":"ContainerStarted","Data":"856f3f9633537f8cfa855843a3b28c6317f9fb233ed3321d5b759de35204e8ae"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.182413 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7j9tp" event={"ID":"0f7b345c-76d4-4f77-9b9d-7a9678976492","Type":"ContainerStarted","Data":"54728dc3bc3eaf579b7fa0023b5e75250bae1cbbda29ab3c2103013e16cabfcc"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.183283 4742 patch_prober.go:28] interesting pod/router-default-5444994796-nfv6w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:54:27 crc kubenswrapper[4742]: [-]has-synced failed: reason withheld Dec 05 05:54:27 crc kubenswrapper[4742]: [+]process-running ok Dec 05 05:54:27 crc kubenswrapper[4742]: healthz check failed Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.183318 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfv6w" podUID="f2d129d3-f117-492c-a680-a03e1ca560e1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.190096 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:27 crc kubenswrapper[4742]: E1205 05:54:27.191618 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:27.691599831 +0000 UTC m=+143.603734883 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.209580 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" event={"ID":"0d05e5b0-e7d1-4e07-923d-79473a1532d2","Type":"ContainerStarted","Data":"dd314aac9fd2a7aeece365623610c63891259bde0ee728bf94fee650fb79e408"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.210381 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.211882 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-bxxl2" podStartSLOduration=123.211872172 podStartE2EDuration="2m3.211872172s" podCreationTimestamp="2025-12-05 05:52:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:27.154040182 +0000 UTC m=+143.066175244" watchObservedRunningTime="2025-12-05 05:54:27.211872172 +0000 UTC m=+143.124007234" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.212851 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk" podStartSLOduration=124.212846995 podStartE2EDuration="2m4.212846995s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:27.211425586 +0000 UTC m=+143.123560648" watchObservedRunningTime="2025-12-05 05:54:27.212846995 +0000 UTC m=+143.124982067" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.227490 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" event={"ID":"defcf8e8-7650-448f-9950-3434978ee21d","Type":"ContainerStarted","Data":"4f6f9ae84f32c76f36ce517cc97a6f160b0a4bb79fb4ba70e66305bfd34d5609"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.238373 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.247961 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-7j9tp" podStartSLOduration=124.24794059 podStartE2EDuration="2m4.24794059s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:27.247868117 +0000 UTC m=+143.160003189" watchObservedRunningTime="2025-12-05 05:54:27.24794059 +0000 UTC m=+143.160075662" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.254595 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-7jwfc" event={"ID":"2a89062e-1da0-4abd-a415-92c6fd9e76f4","Type":"ContainerStarted","Data":"54f5d2e6c97366f5751f1a9715c6b4a96958afd6f2867b90603d589cfdf476bf"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.270704 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-58gtq" podStartSLOduration=124.270686604 podStartE2EDuration="2m4.270686604s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:27.269422341 +0000 UTC m=+143.181557403" watchObservedRunningTime="2025-12-05 05:54:27.270686604 +0000 UTC m=+143.182821656" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.293321 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-xqfl8" event={"ID":"eef2f79c-627c-4bd0-829a-8c16b1f85143","Type":"ContainerStarted","Data":"9a0b99375db6f36edf7773140950230b628ae8a2ee3af16d23157f51f5ad14d5"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.299767 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:27 crc kubenswrapper[4742]: E1205 05:54:27.300085 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:27.800073775 +0000 UTC m=+143.712208837 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.317141 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" event={"ID":"0f6dbf90-5722-4c03-b815-aa25831f6942","Type":"ContainerStarted","Data":"9923e43ae4b7bb42f0d0181b2dc9105d2d49850d3af036e8c09d0dca00563a3e"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.339218 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-qhd4g" event={"ID":"cca75767-0e7f-42b7-8acc-fdd795b5c30e","Type":"ContainerStarted","Data":"bd80aff371f68d93633ebe359553c2fb33add447582efae6afe6615c830ff8d5"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.348610 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-7jwfc" podStartSLOduration=123.348593346 podStartE2EDuration="2m3.348593346s" podCreationTimestamp="2025-12-05 05:52:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:27.306297917 +0000 UTC m=+143.218432979" watchObservedRunningTime="2025-12-05 05:54:27.348593346 +0000 UTC m=+143.260728408" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.372307 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" event={"ID":"ae2344f6-2b2e-4071-a215-77b7513f3138","Type":"ContainerStarted","Data":"ecaaa17bd18485604bef91d5f41edf834edf27ed3d8b2fe6bdbfec8d82bc5446"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.386346 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" podStartSLOduration=123.386330631 podStartE2EDuration="2m3.386330631s" podCreationTimestamp="2025-12-05 05:52:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:27.351392141 +0000 UTC m=+143.263527203" watchObservedRunningTime="2025-12-05 05:54:27.386330631 +0000 UTC m=+143.298465693" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.398587 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" event={"ID":"d42b86d2-579c-4fa1-aeb4-6d3d7a47798d","Type":"ContainerStarted","Data":"719421e56b6a7efd19c7f1b274f2e39fdd4892062ce4cdcfdd058aecd982f00d"} Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.399498 4742 patch_prober.go:28] interesting pod/downloads-7954f5f757-gn9cx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.399548 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gn9cx" podUID="bfef6735-7572-4411-b37d-b194d84534de" containerName="download-server" 
probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.400314 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:27 crc kubenswrapper[4742]: E1205 05:54:27.401580 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:27.90156318 +0000 UTC m=+143.813698232 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.405901 4742 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-dfmr9 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" start-of-body= Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.406266 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" podUID="997634d0-c379-4978-a8a5-4da39a072ff4" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.37:8080/healthz\": dial tcp 10.217.0.37:8080: connect: connection refused" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.440853 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-wvkz9" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.477995 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-qhd4g" podStartSLOduration=8.477980012 podStartE2EDuration="8.477980012s" podCreationTimestamp="2025-12-05 05:54:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:27.425444793 +0000 UTC m=+143.337579875" watchObservedRunningTime="2025-12-05 05:54:27.477980012 +0000 UTC m=+143.390115074" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.479387 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-xqfl8" podStartSLOduration=124.47937855 podStartE2EDuration="2m4.47937855s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:27.476664477 +0000 UTC m=+143.388799559" watchObservedRunningTime="2025-12-05 05:54:27.47937855 +0000 UTC m=+143.391513612" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.547619 4742 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:27 crc kubenswrapper[4742]: E1205 05:54:27.557604 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:28.057589193 +0000 UTC m=+143.969724255 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.591834 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-2cttt" podStartSLOduration=124.591806828 podStartE2EDuration="2m4.591806828s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:27.560530513 +0000 UTC m=+143.472665575" watchObservedRunningTime="2025-12-05 05:54:27.591806828 +0000 UTC m=+143.503941890" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.659823 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:27 crc kubenswrapper[4742]: E1205 05:54:27.660220 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:28.160200317 +0000 UTC m=+144.072335379 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.683798 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xmjng" podStartSLOduration=124.68378151 podStartE2EDuration="2m4.68378151s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:27.602037266 +0000 UTC m=+143.514172318" watchObservedRunningTime="2025-12-05 05:54:27.68378151 +0000 UTC m=+143.595916572" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.761881 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:27 crc kubenswrapper[4742]: E1205 05:54:27.762213 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:28.26220295 +0000 UTC m=+144.174338002 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.783998 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kjh59"] Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.784952 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kjh59" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.791848 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.798331 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kjh59"] Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.862671 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.863238 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff4c4d35-276e-47e9-8b12-76361e2005bf-utilities\") pod \"community-operators-kjh59\" (UID: \"ff4c4d35-276e-47e9-8b12-76361e2005bf\") " pod="openshift-marketplace/community-operators-kjh59" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.863270 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff4c4d35-276e-47e9-8b12-76361e2005bf-catalog-content\") pod \"community-operators-kjh59\" (UID: \"ff4c4d35-276e-47e9-8b12-76361e2005bf\") " pod="openshift-marketplace/community-operators-kjh59" Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.863300 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgz9p\" (UniqueName: \"kubernetes.io/projected/ff4c4d35-276e-47e9-8b12-76361e2005bf-kube-api-access-pgz9p\") pod \"community-operators-kjh59\" (UID: \"ff4c4d35-276e-47e9-8b12-76361e2005bf\") " pod="openshift-marketplace/community-operators-kjh59" Dec 05 05:54:27 crc kubenswrapper[4742]: E1205 05:54:27.863391 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:28.363376945 +0000 UTC m=+144.275512007 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.964514 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf"
Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.964586 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff4c4d35-276e-47e9-8b12-76361e2005bf-utilities\") pod \"community-operators-kjh59\" (UID: \"ff4c4d35-276e-47e9-8b12-76361e2005bf\") " pod="openshift-marketplace/community-operators-kjh59"
Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.964638 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff4c4d35-276e-47e9-8b12-76361e2005bf-catalog-content\") pod \"community-operators-kjh59\" (UID: \"ff4c4d35-276e-47e9-8b12-76361e2005bf\") " pod="openshift-marketplace/community-operators-kjh59"
Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.964668 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgz9p\" (UniqueName: \"kubernetes.io/projected/ff4c4d35-276e-47e9-8b12-76361e2005bf-kube-api-access-pgz9p\") pod \"community-operators-kjh59\" (UID: \"ff4c4d35-276e-47e9-8b12-76361e2005bf\") " pod="openshift-marketplace/community-operators-kjh59"
Dec 05 05:54:27 crc kubenswrapper[4742]: E1205 05:54:27.964882 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:28.464864661 +0000 UTC m=+144.376999723 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.965163 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff4c4d35-276e-47e9-8b12-76361e2005bf-catalog-content\") pod \"community-operators-kjh59\" (UID: \"ff4c4d35-276e-47e9-8b12-76361e2005bf\") " pod="openshift-marketplace/community-operators-kjh59"
Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.965170 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff4c4d35-276e-47e9-8b12-76361e2005bf-utilities\") pod \"community-operators-kjh59\" (UID: \"ff4c4d35-276e-47e9-8b12-76361e2005bf\") " pod="openshift-marketplace/community-operators-kjh59"
Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.973618 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dpnl4"]
Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.974571 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dpnl4"
Dec 05 05:54:27 crc kubenswrapper[4742]: I1205 05:54:27.992620 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.000122 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dpnl4"]
Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.006914 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgz9p\" (UniqueName: \"kubernetes.io/projected/ff4c4d35-276e-47e9-8b12-76361e2005bf-kube-api-access-pgz9p\") pod \"community-operators-kjh59\" (UID: \"ff4c4d35-276e-47e9-8b12-76361e2005bf\") " pod="openshift-marketplace/community-operators-kjh59"
Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.065918 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.066417 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8eed6205-7703-433f-83cf-d7b51867e5ee-catalog-content\") pod \"certified-operators-dpnl4\" (UID: \"8eed6205-7703-433f-83cf-d7b51867e5ee\") " pod="openshift-marketplace/certified-operators-dpnl4"
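[Note] Each marketplace catalog pod in these entries mounts the same three volumes: two emptyDirs (utilities, catalog-content) and a projected kube-api-access-* volume carrying the service-account token. A rough sketch of that volume stanza in Go client types follows; field values such as the token expiry and the trimmed projection list are assumptions, not taken from this log:

package main

import (
	corev1 "k8s.io/api/core/v1"
)

// catalogVolumes sketches the three volumes the reconciler mounts for a
// catalog pod above: two emptyDirs plus the projected service-account
// token volume. Values are illustrative; real kube-api-access volumes
// also project the namespace via the downward API.
func catalogVolumes() []corev1.Volume {
	expiration := int64(3607) // assumed default-ish expiry, not from the log
	return []corev1.Volume{
		{Name: "utilities", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
		{Name: "catalog-content", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
		{
			Name: "kube-api-access-pgz9p",
			VolumeSource: corev1.VolumeSource{
				Projected: &corev1.ProjectedVolumeSource{
					Sources: []corev1.VolumeProjection{
						{ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
							Path:              "token",
							ExpirationSeconds: &expiration,
						}},
						{ConfigMap: &corev1.ConfigMapProjection{
							LocalObjectReference: corev1.LocalObjectReference{Name: "kube-root-ca.crt"},
							Items:                []corev1.KeyToPath{{Key: "ca.crt", Path: "ca.crt"}},
						}},
					},
				},
			},
		},
	}
}

func main() { _ = catalogVolumes() }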
pod="openshift-marketplace/certified-operators-dpnl4" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.066527 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8eed6205-7703-433f-83cf-d7b51867e5ee-utilities\") pod \"certified-operators-dpnl4\" (UID: \"8eed6205-7703-433f-83cf-d7b51867e5ee\") " pod="openshift-marketplace/certified-operators-dpnl4" Dec 05 05:54:28 crc kubenswrapper[4742]: E1205 05:54:28.066628 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:28.566613036 +0000 UTC m=+144.478748098 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.103015 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kjh59" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.171662 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8eed6205-7703-433f-83cf-d7b51867e5ee-utilities\") pod \"certified-operators-dpnl4\" (UID: \"8eed6205-7703-433f-83cf-d7b51867e5ee\") " pod="openshift-marketplace/certified-operators-dpnl4" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.171711 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.171755 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8eed6205-7703-433f-83cf-d7b51867e5ee-catalog-content\") pod \"certified-operators-dpnl4\" (UID: \"8eed6205-7703-433f-83cf-d7b51867e5ee\") " pod="openshift-marketplace/certified-operators-dpnl4" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.171793 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hn2bp\" (UniqueName: \"kubernetes.io/projected/8eed6205-7703-433f-83cf-d7b51867e5ee-kube-api-access-hn2bp\") pod \"certified-operators-dpnl4\" (UID: \"8eed6205-7703-433f-83cf-d7b51867e5ee\") " pod="openshift-marketplace/certified-operators-dpnl4" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.172404 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8eed6205-7703-433f-83cf-d7b51867e5ee-utilities\") pod \"certified-operators-dpnl4\" (UID: \"8eed6205-7703-433f-83cf-d7b51867e5ee\") " pod="openshift-marketplace/certified-operators-dpnl4" Dec 05 05:54:28 crc 
Dec 05 05:54:28 crc kubenswrapper[4742]: E1205 05:54:28.172627 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:28.672616776 +0000 UTC m=+144.584751838 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.172953 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8eed6205-7703-433f-83cf-d7b51867e5ee-catalog-content\") pod \"certified-operators-dpnl4\" (UID: \"8eed6205-7703-433f-83cf-d7b51867e5ee\") " pod="openshift-marketplace/certified-operators-dpnl4"
Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.175346 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-99jn8"]
Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.175758 4742 patch_prober.go:28] interesting pod/router-default-5444994796-nfv6w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 05:54:28 crc kubenswrapper[4742]: [-]has-synced failed: reason withheld
Dec 05 05:54:28 crc kubenswrapper[4742]: [+]process-running ok
Dec 05 05:54:28 crc kubenswrapper[4742]: healthz check failed
Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.175788 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfv6w" podUID="f2d129d3-f117-492c-a680-a03e1ca560e1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
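[Note] The router's startup probe output above follows the common healthz convention: each named check reports [+] or [-], any failure yields HTTP 500, and the probe keeps failing until backend-http and has-synced turn healthy. A self-contained Go approximation of such a handler, using only the standard library (this is not the router's actual code; check names and the port are illustrative):

package main

import (
	"fmt"
	"net/http"
)

// check is one named readiness condition, like backend-http or has-synced
// in the probe output above.
type check struct {
	name string
	ok   func() bool
}

// healthz writes the "[-]name failed" / "[+]name ok" summary seen in the
// kubelet log and returns 500 while any check fails.
func healthz(checks []check) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		failed := false
		body := ""
		for _, c := range checks {
			if c.ok() {
				body += fmt.Sprintf("[+]%s ok\n", c.name)
			} else {
				// The router withholds details; probes only see pass/fail.
				body += fmt.Sprintf("[-]%s failed: reason withheld\n", c.name)
				failed = true
			}
		}
		if failed {
			body += "healthz check failed\n"
			w.WriteHeader(http.StatusInternalServerError) // probe logs "statuscode: 500"
		}
		fmt.Fprint(w, body)
	}
}

func main() {
	synced := false // flips to true once the backend has synced
	http.Handle("/healthz", healthz([]check{
		{name: "backend-http", ok: func() bool { return synced }},
		{name: "has-synced", ok: func() bool { return synced }},
		{name: "process-running", ok: func() bool { return true }},
	}))
	_ = http.ListenAndServe(":1936", nil)
}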
Need to start a new one" pod="openshift-marketplace/community-operators-99jn8" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.195571 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9wtdc" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.207457 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-99jn8"] Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.265238 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hn2bp\" (UniqueName: \"kubernetes.io/projected/8eed6205-7703-433f-83cf-d7b51867e5ee-kube-api-access-hn2bp\") pod \"certified-operators-dpnl4\" (UID: \"8eed6205-7703-433f-83cf-d7b51867e5ee\") " pod="openshift-marketplace/certified-operators-dpnl4" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.272729 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.272933 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzhsr\" (UniqueName: \"kubernetes.io/projected/e64b157e-a58d-4dfd-8d97-be73077a1e25-kube-api-access-rzhsr\") pod \"community-operators-99jn8\" (UID: \"e64b157e-a58d-4dfd-8d97-be73077a1e25\") " pod="openshift-marketplace/community-operators-99jn8" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.273017 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e64b157e-a58d-4dfd-8d97-be73077a1e25-catalog-content\") pod \"community-operators-99jn8\" (UID: \"e64b157e-a58d-4dfd-8d97-be73077a1e25\") " pod="openshift-marketplace/community-operators-99jn8" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.273035 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e64b157e-a58d-4dfd-8d97-be73077a1e25-utilities\") pod \"community-operators-99jn8\" (UID: \"e64b157e-a58d-4dfd-8d97-be73077a1e25\") " pod="openshift-marketplace/community-operators-99jn8" Dec 05 05:54:28 crc kubenswrapper[4742]: E1205 05:54:28.273176 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:28.773162949 +0000 UTC m=+144.685298001 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.285906 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dpnl4" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.377851 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.378128 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e64b157e-a58d-4dfd-8d97-be73077a1e25-catalog-content\") pod \"community-operators-99jn8\" (UID: \"e64b157e-a58d-4dfd-8d97-be73077a1e25\") " pod="openshift-marketplace/community-operators-99jn8" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.378157 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e64b157e-a58d-4dfd-8d97-be73077a1e25-utilities\") pod \"community-operators-99jn8\" (UID: \"e64b157e-a58d-4dfd-8d97-be73077a1e25\") " pod="openshift-marketplace/community-operators-99jn8" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.378207 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzhsr\" (UniqueName: \"kubernetes.io/projected/e64b157e-a58d-4dfd-8d97-be73077a1e25-kube-api-access-rzhsr\") pod \"community-operators-99jn8\" (UID: \"e64b157e-a58d-4dfd-8d97-be73077a1e25\") " pod="openshift-marketplace/community-operators-99jn8" Dec 05 05:54:28 crc kubenswrapper[4742]: E1205 05:54:28.378688 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:28.878677142 +0000 UTC m=+144.790812204 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.379144 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e64b157e-a58d-4dfd-8d97-be73077a1e25-catalog-content\") pod \"community-operators-99jn8\" (UID: \"e64b157e-a58d-4dfd-8d97-be73077a1e25\") " pod="openshift-marketplace/community-operators-99jn8" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.379341 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e64b157e-a58d-4dfd-8d97-be73077a1e25-utilities\") pod \"community-operators-99jn8\" (UID: \"e64b157e-a58d-4dfd-8d97-be73077a1e25\") " pod="openshift-marketplace/community-operators-99jn8" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.415574 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cbhtr"] Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.416460 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cbhtr" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.437891 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzhsr\" (UniqueName: \"kubernetes.io/projected/e64b157e-a58d-4dfd-8d97-be73077a1e25-kube-api-access-rzhsr\") pod \"community-operators-99jn8\" (UID: \"e64b157e-a58d-4dfd-8d97-be73077a1e25\") " pod="openshift-marketplace/community-operators-99jn8" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.442748 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" event={"ID":"e383eaae-c654-4e64-be23-cb7a9cef6df7","Type":"ContainerStarted","Data":"a32ebe510e0f827bac246c3feb8c3abccec0fa0578e06098c53dacf6611c395e"} Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.442788 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" event={"ID":"e383eaae-c654-4e64-be23-cb7a9cef6df7","Type":"ContainerStarted","Data":"39324d0bae42e556046f03ee2c5afbbfa17e5b069a0061c324954a3906f988c0"} Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.455139 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cbhtr"] Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.457169 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-fpfzc" event={"ID":"930e305a-35d6-4053-8064-58fb2662d8b0","Type":"ContainerStarted","Data":"c87ebca7801b7585e420251aaca623220b6ec9b3ec87ad01b8b305cf0df0ad8b"} Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.481565 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:28 
crc kubenswrapper[4742]: I1205 05:54:28.481781 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmlms\" (UniqueName: \"kubernetes.io/projected/7fc280e3-5842-4844-ad57-d3526ceeb957-kube-api-access-cmlms\") pod \"certified-operators-cbhtr\" (UID: \"7fc280e3-5842-4844-ad57-d3526ceeb957\") " pod="openshift-marketplace/certified-operators-cbhtr" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.481817 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fc280e3-5842-4844-ad57-d3526ceeb957-catalog-content\") pod \"certified-operators-cbhtr\" (UID: \"7fc280e3-5842-4844-ad57-d3526ceeb957\") " pod="openshift-marketplace/certified-operators-cbhtr" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.481850 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fc280e3-5842-4844-ad57-d3526ceeb957-utilities\") pod \"certified-operators-cbhtr\" (UID: \"7fc280e3-5842-4844-ad57-d3526ceeb957\") " pod="openshift-marketplace/certified-operators-cbhtr" Dec 05 05:54:28 crc kubenswrapper[4742]: E1205 05:54:28.482008 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:28.98198762 +0000 UTC m=+144.894122682 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.511377 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-99jn8" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.583699 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.584433 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmlms\" (UniqueName: \"kubernetes.io/projected/7fc280e3-5842-4844-ad57-d3526ceeb957-kube-api-access-cmlms\") pod \"certified-operators-cbhtr\" (UID: \"7fc280e3-5842-4844-ad57-d3526ceeb957\") " pod="openshift-marketplace/certified-operators-cbhtr" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.584591 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fc280e3-5842-4844-ad57-d3526ceeb957-catalog-content\") pod \"certified-operators-cbhtr\" (UID: \"7fc280e3-5842-4844-ad57-d3526ceeb957\") " pod="openshift-marketplace/certified-operators-cbhtr" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.584816 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fc280e3-5842-4844-ad57-d3526ceeb957-utilities\") pod \"certified-operators-cbhtr\" (UID: \"7fc280e3-5842-4844-ad57-d3526ceeb957\") " pod="openshift-marketplace/certified-operators-cbhtr" Dec 05 05:54:28 crc kubenswrapper[4742]: E1205 05:54:28.592360 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:29.092345078 +0000 UTC m=+145.004480140 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.623949 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fc280e3-5842-4844-ad57-d3526ceeb957-catalog-content\") pod \"certified-operators-cbhtr\" (UID: \"7fc280e3-5842-4844-ad57-d3526ceeb957\") " pod="openshift-marketplace/certified-operators-cbhtr" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.627006 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fc280e3-5842-4844-ad57-d3526ceeb957-utilities\") pod \"certified-operators-cbhtr\" (UID: \"7fc280e3-5842-4844-ad57-d3526ceeb957\") " pod="openshift-marketplace/certified-operators-cbhtr" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.683504 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmlms\" (UniqueName: \"kubernetes.io/projected/7fc280e3-5842-4844-ad57-d3526ceeb957-kube-api-access-cmlms\") pod \"certified-operators-cbhtr\" (UID: \"7fc280e3-5842-4844-ad57-d3526ceeb957\") " pod="openshift-marketplace/certified-operators-cbhtr" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.687986 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:28 crc kubenswrapper[4742]: E1205 05:54:28.688467 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:29.188449981 +0000 UTC m=+145.100585043 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.742284 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cbhtr" Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.789159 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:28 crc kubenswrapper[4742]: E1205 05:54:28.789500 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:29.289486951 +0000 UTC m=+145.201622013 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.890566 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:28 crc kubenswrapper[4742]: E1205 05:54:28.890991 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:29.390975977 +0000 UTC m=+145.303111039 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.932764 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kjh59"] Dec 05 05:54:28 crc kubenswrapper[4742]: I1205 05:54:28.993203 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:28 crc kubenswrapper[4742]: E1205 05:54:28.993669 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 05:54:29.493646803 +0000 UTC m=+145.405781945 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.100667 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:29 crc kubenswrapper[4742]: E1205 05:54:29.100804 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:29.600780111 +0000 UTC m=+145.512915173 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.100913 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:29 crc kubenswrapper[4742]: E1205 05:54:29.101214 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:29.601202965 +0000 UTC m=+145.513338027 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.156644 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.159729 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.171571 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.175315 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.175343 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.180458 4742 patch_prober.go:28] interesting pod/router-default-5444994796-nfv6w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:54:29 crc kubenswrapper[4742]: [-]has-synced failed: reason withheld Dec 05 05:54:29 crc kubenswrapper[4742]: [+]process-running ok Dec 05 05:54:29 crc kubenswrapper[4742]: healthz check failed Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.180498 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfv6w" podUID="f2d129d3-f117-492c-a680-a03e1ca560e1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.201659 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.201948 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fbcd85a4-bd70-4d39-9122-17f509e9f2bd-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"fbcd85a4-bd70-4d39-9122-17f509e9f2bd\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.202107 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fbcd85a4-bd70-4d39-9122-17f509e9f2bd-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"fbcd85a4-bd70-4d39-9122-17f509e9f2bd\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 05:54:29 crc kubenswrapper[4742]: E1205 05:54:29.202254 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:29.702235746 +0000 UTC m=+145.614370808 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.269581 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dpnl4"] Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.303789 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.303866 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fbcd85a4-bd70-4d39-9122-17f509e9f2bd-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"fbcd85a4-bd70-4d39-9122-17f509e9f2bd\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.303902 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fbcd85a4-bd70-4d39-9122-17f509e9f2bd-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"fbcd85a4-bd70-4d39-9122-17f509e9f2bd\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.303971 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fbcd85a4-bd70-4d39-9122-17f509e9f2bd-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"fbcd85a4-bd70-4d39-9122-17f509e9f2bd\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 05:54:29 crc kubenswrapper[4742]: E1205 05:54:29.304260 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:29.804249999 +0000 UTC m=+145.716385051 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.353125 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fbcd85a4-bd70-4d39-9122-17f509e9f2bd-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"fbcd85a4-bd70-4d39-9122-17f509e9f2bd\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.404919 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:29 crc kubenswrapper[4742]: E1205 05:54:29.406123 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:29.906075087 +0000 UTC m=+145.818210159 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.406755 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:29 crc kubenswrapper[4742]: E1205 05:54:29.407471 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 05:54:29.907436613 +0000 UTC m=+145.819571675 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-mp2sf" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.425773 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cbhtr"] Dec 05 05:54:29 crc kubenswrapper[4742]: W1205 05:54:29.444927 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7fc280e3_5842_4844_ad57_d3526ceeb957.slice/crio-74466f1d3cb5d0dca4c6a3247ac6fdf68994e86088b5136940caa485b4343826 WatchSource:0}: Error finding container 74466f1d3cb5d0dca4c6a3247ac6fdf68994e86088b5136940caa485b4343826: Status 404 returned error can't find the container with id 74466f1d3cb5d0dca4c6a3247ac6fdf68994e86088b5136940caa485b4343826 Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.447206 4742 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.469792 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dpnl4" event={"ID":"8eed6205-7703-433f-83cf-d7b51867e5ee","Type":"ContainerStarted","Data":"4f2f5318006687618054386d95f70e610c71438d3a6e920b2e3162314734390f"} Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.476335 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cbhtr" event={"ID":"7fc280e3-5842-4844-ad57-d3526ceeb957","Type":"ContainerStarted","Data":"74466f1d3cb5d0dca4c6a3247ac6fdf68994e86088b5136940caa485b4343826"} Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.495800 4742 generic.go:334] "Generic (PLEG): container finished" podID="ff4c4d35-276e-47e9-8b12-76361e2005bf" containerID="9e73b614ad427cfab372b9af0d28172ad095ca0be98cea2314c28dd7a960d7bd" exitCode=0 Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.495919 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kjh59" event={"ID":"ff4c4d35-276e-47e9-8b12-76361e2005bf","Type":"ContainerDied","Data":"9e73b614ad427cfab372b9af0d28172ad095ca0be98cea2314c28dd7a960d7bd"} Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.495952 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kjh59" event={"ID":"ff4c4d35-276e-47e9-8b12-76361e2005bf","Type":"ContainerStarted","Data":"3fa7c9e56854e1eca485a44e9a455701762e085655f3daae3789a8d0b06e5024"} Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.499312 4742 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.508834 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 05:54:29 crc 
Dec 05 05:54:29 crc kubenswrapper[4742]: E1205 05:54:29.509359 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 05:54:30.009338233 +0000 UTC m=+145.921473295 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.517905 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" event={"ID":"e383eaae-c654-4e64-be23-cb7a9cef6df7","Type":"ContainerStarted","Data":"e3bd9101a0cdc714977dc7f6411c9c78f58bf742b87ae3e817ef6bb5ea2baea0"}
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.533299 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.534413 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-flkxk"
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.542154 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-99jn8"]
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.569677 4742 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-05T05:54:29.447230058Z","Handler":null,"Name":""}
Dec 05 05:54:29 crc kubenswrapper[4742]: W1205 05:54:29.597268 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode64b157e_a58d_4dfd_8d97_be73077a1e25.slice/crio-4c26f167dd43ede8da5696552047e877a09dd46852dbe95ac4fd754300b8e556 WatchSource:0}: Error finding container 4c26f167dd43ede8da5696552047e877a09dd46852dbe95ac4fd754300b8e556: Status 404 returned error can't find the container with id 4c26f167dd43ede8da5696552047e877a09dd46852dbe95ac4fd754300b8e556
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.597427 4742 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.597453 4742 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.625635 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf"
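[Note] This is the turning point of the retry saga above: the plugin watcher picked up kubevirt.io.hostpath-provisioner-reg.sock (05:54:29.447), and here the kubelet validates and registers the driver, after which the pending CSI operations can succeed. Registration happens over a small gRPC service that the driver's registrar sidecar exposes on that socket. A sketch of the server side, assuming the k8s.io/kubelet pluginregistration/v1 API (socket path, driver name, and version are copied from the log; the rest is illustrative, not the node-driver-registrar's actual code):

package main

import (
	"context"
	"net"
	"os"

	"google.golang.org/grpc"
	registerapi "k8s.io/kubelet/pkg/apis/pluginregistration/v1"
)

// registrar answers the kubelet's plugin_watcher over the -reg.sock shown
// being added to the desired state cache above.
type registrar struct{}

func (registrar) GetInfo(ctx context.Context, req *registerapi.InfoRequest) (*registerapi.PluginInfo, error) {
	return &registerapi.PluginInfo{
		Type:              registerapi.CSIPlugin,
		Name:              "kubevirt.io.hostpath-provisioner",
		Endpoint:          "/var/lib/kubelet/plugins/csi-hostpath/csi.sock",
		SupportedVersions: []string{"1.0.0"}, // matches "versions: 1.0.0" in the log
	}, nil
}

func (registrar) NotifyRegistrationStatus(ctx context.Context, st *registerapi.RegistrationStatus) (*registerapi.RegistrationStatusResponse, error) {
	// The kubelet calls back here after csi_plugin.go validates the driver.
	return &registerapi.RegistrationStatusResponse{}, nil
}

func main() {
	sock := "/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
	_ = os.Remove(sock) // clear a stale socket from a previous run, if any
	l, err := net.Listen("unix", sock)
	if err != nil {
		panic(err)
	}
	s := grpc.NewServer()
	registerapi.RegisterRegistrationServer(s, registrar{})
	_ = s.Serve(l)
}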
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.703265 4742 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.703538 4742 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf"
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.795289 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-mp2sf\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf"
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.829746 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.963624 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-p8pxx" podStartSLOduration=10.963605982 podStartE2EDuration="10.963605982s" podCreationTimestamp="2025-12-05 05:54:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:29.659632271 +0000 UTC m=+145.571767333" watchObservedRunningTime="2025-12-05 05:54:29.963605982 +0000 UTC m=+145.875741044"
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.966877 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-89q4d"]
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.968181 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-89q4d"
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.975477 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 05 05:54:29 crc kubenswrapper[4742]: I1205 05:54:29.981508 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-89q4d"]
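[Note] MountDevice is skipped above because the hostpath driver does not advertise the STAGE_UNSTAGE_VOLUME node capability: there is no NodeStageVolume step, so the kubelet records MountDevice as trivially succeeded and goes straight to NodePublishVolume (the SetUp entry). A small sketch of how a caller can probe that capability over the CSI gRPC API (socket path taken from the log; the helper itself is illustrative):

package main

import (
	"context"
	"fmt"
	"time"

	"github.com/container-storage-interface/spec/lib/go/csi"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
)

// hasStageUnstage asks a CSI node plugin whether it implements
// NodeStage/NodeUnstage. When it does not (as with the hostpath driver
// here), an attacher can skip MountDevice exactly as csi_attacher.go:380
// logs above.
func hasStageUnstage(ctx context.Context, sock string) (bool, error) {
	conn, err := grpc.Dial("unix://"+sock, grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		return false, err
	}
	defer conn.Close()

	resp, err := csi.NewNodeClient(conn).NodeGetCapabilities(ctx, &csi.NodeGetCapabilitiesRequest{})
	if err != nil {
		return false, err
	}
	for _, c := range resp.GetCapabilities() {
		if c.GetRpc().GetType() == csi.NodeServiceCapability_RPC_STAGE_UNSTAGE_VOLUME {
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	ok, err := hasStageUnstage(ctx, "/var/lib/kubelet/plugins/csi-hostpath/csi.sock")
	if err != nil {
		panic(err)
	}
	if !ok {
		fmt.Println("STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...")
	}
}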
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.038641 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnmlc\" (UniqueName: \"kubernetes.io/projected/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-kube-api-access-pnmlc\") pod \"redhat-marketplace-89q4d\" (UID: \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\") " pod="openshift-marketplace/redhat-marketplace-89q4d" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.038723 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-utilities\") pod \"redhat-marketplace-89q4d\" (UID: \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\") " pod="openshift-marketplace/redhat-marketplace-89q4d" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.038756 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-catalog-content\") pod \"redhat-marketplace-89q4d\" (UID: \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\") " pod="openshift-marketplace/redhat-marketplace-89q4d" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.067822 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.139367 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-utilities\") pod \"redhat-marketplace-89q4d\" (UID: \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\") " pod="openshift-marketplace/redhat-marketplace-89q4d" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.139424 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-catalog-content\") pod \"redhat-marketplace-89q4d\" (UID: \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\") " pod="openshift-marketplace/redhat-marketplace-89q4d" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.139493 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnmlc\" (UniqueName: \"kubernetes.io/projected/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-kube-api-access-pnmlc\") pod \"redhat-marketplace-89q4d\" (UID: \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\") " pod="openshift-marketplace/redhat-marketplace-89q4d" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.139914 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-utilities\") pod \"redhat-marketplace-89q4d\" (UID: \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\") " pod="openshift-marketplace/redhat-marketplace-89q4d" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.139912 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-catalog-content\") pod \"redhat-marketplace-89q4d\" (UID: \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\") " pod="openshift-marketplace/redhat-marketplace-89q4d" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.172132 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pnmlc\" (UniqueName: \"kubernetes.io/projected/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-kube-api-access-pnmlc\") pod \"redhat-marketplace-89q4d\" (UID: \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\") " pod="openshift-marketplace/redhat-marketplace-89q4d" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.174814 4742 patch_prober.go:28] interesting pod/router-default-5444994796-nfv6w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:54:30 crc kubenswrapper[4742]: [-]has-synced failed: reason withheld Dec 05 05:54:30 crc kubenswrapper[4742]: [+]process-running ok Dec 05 05:54:30 crc kubenswrapper[4742]: healthz check failed Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.174855 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfv6w" podUID="f2d129d3-f117-492c-a680-a03e1ca560e1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.201205 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.288342 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-mp2sf"] Dec 05 05:54:30 crc kubenswrapper[4742]: W1205 05:54:30.298268 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9d19072_f6f9_42da_8b86_5d6bff4b340c.slice/crio-72cc8594c443119241f9d5001fa69f1d4c6fb97841032b078fdafb3affa8142d WatchSource:0}: Error finding container 72cc8594c443119241f9d5001fa69f1d4c6fb97841032b078fdafb3affa8142d: Status 404 returned error can't find the container with id 72cc8594c443119241f9d5001fa69f1d4c6fb97841032b078fdafb3affa8142d Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.303708 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-89q4d" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.361265 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pwkx9"] Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.362697 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pwkx9" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.370438 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwkx9"] Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.390036 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.442872 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c433daac-2067-47ed-ba5c-01ae452a511d-catalog-content\") pod \"redhat-marketplace-pwkx9\" (UID: \"c433daac-2067-47ed-ba5c-01ae452a511d\") " pod="openshift-marketplace/redhat-marketplace-pwkx9" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.443012 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c433daac-2067-47ed-ba5c-01ae452a511d-utilities\") pod \"redhat-marketplace-pwkx9\" (UID: \"c433daac-2067-47ed-ba5c-01ae452a511d\") " pod="openshift-marketplace/redhat-marketplace-pwkx9" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.443048 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7l2w\" (UniqueName: \"kubernetes.io/projected/c433daac-2067-47ed-ba5c-01ae452a511d-kube-api-access-r7l2w\") pod \"redhat-marketplace-pwkx9\" (UID: \"c433daac-2067-47ed-ba5c-01ae452a511d\") " pod="openshift-marketplace/redhat-marketplace-pwkx9" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.513099 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-89q4d"] Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.523243 4742 generic.go:334] "Generic (PLEG): container finished" podID="ebaea921-5d50-4d64-b73e-db0feab77248" containerID="f852d749ea8342bd3d3752c9c9b2516fd2c9158e32d518022257f1233a574cd3" exitCode=0 Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.523297 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" event={"ID":"ebaea921-5d50-4d64-b73e-db0feab77248","Type":"ContainerDied","Data":"f852d749ea8342bd3d3752c9c9b2516fd2c9158e32d518022257f1233a574cd3"} Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.525005 4742 generic.go:334] "Generic (PLEG): container finished" podID="8eed6205-7703-433f-83cf-d7b51867e5ee" containerID="6473ab8a47f5feeb54252d32d91513201c9acf8e5c5cdb42530637a361cf8408" exitCode=0 Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.525045 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dpnl4" event={"ID":"8eed6205-7703-433f-83cf-d7b51867e5ee","Type":"ContainerDied","Data":"6473ab8a47f5feeb54252d32d91513201c9acf8e5c5cdb42530637a361cf8408"} Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.527226 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" event={"ID":"e9d19072-f6f9-42da-8b86-5d6bff4b340c","Type":"ContainerStarted","Data":"72cc8594c443119241f9d5001fa69f1d4c6fb97841032b078fdafb3affa8142d"} Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.529219 4742 generic.go:334] "Generic (PLEG): container finished" 
podID="7fc280e3-5842-4844-ad57-d3526ceeb957" containerID="fc1bad23496f588771e7d0a2613908887d50daf49cf7ccb0ab803a7b628f7fa9" exitCode=0 Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.529298 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cbhtr" event={"ID":"7fc280e3-5842-4844-ad57-d3526ceeb957","Type":"ContainerDied","Data":"fc1bad23496f588771e7d0a2613908887d50daf49cf7ccb0ab803a7b628f7fa9"} Dec 05 05:54:30 crc kubenswrapper[4742]: W1205 05:54:30.531031 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8ed5ac0_ecdc_4f4f_a13b_223289da1f67.slice/crio-290a114c945b0a075acbd763cd261322d49ea2a763e35da2d9cad64fb3c4dbd1 WatchSource:0}: Error finding container 290a114c945b0a075acbd763cd261322d49ea2a763e35da2d9cad64fb3c4dbd1: Status 404 returned error can't find the container with id 290a114c945b0a075acbd763cd261322d49ea2a763e35da2d9cad64fb3c4dbd1 Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.534063 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"fbcd85a4-bd70-4d39-9122-17f509e9f2bd","Type":"ContainerStarted","Data":"b55f72feb50f0aefc92c275c16cf60e1a0b11c4583e6d108f7d5684b620dec9a"} Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.536017 4742 generic.go:334] "Generic (PLEG): container finished" podID="e64b157e-a58d-4dfd-8d97-be73077a1e25" containerID="06c454304dd31229143cdc44197155bb019079a41933852da592819711645824" exitCode=0 Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.536083 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-99jn8" event={"ID":"e64b157e-a58d-4dfd-8d97-be73077a1e25","Type":"ContainerDied","Data":"06c454304dd31229143cdc44197155bb019079a41933852da592819711645824"} Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.536107 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-99jn8" event={"ID":"e64b157e-a58d-4dfd-8d97-be73077a1e25","Type":"ContainerStarted","Data":"4c26f167dd43ede8da5696552047e877a09dd46852dbe95ac4fd754300b8e556"} Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.544025 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c433daac-2067-47ed-ba5c-01ae452a511d-utilities\") pod \"redhat-marketplace-pwkx9\" (UID: \"c433daac-2067-47ed-ba5c-01ae452a511d\") " pod="openshift-marketplace/redhat-marketplace-pwkx9" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.544145 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7l2w\" (UniqueName: \"kubernetes.io/projected/c433daac-2067-47ed-ba5c-01ae452a511d-kube-api-access-r7l2w\") pod \"redhat-marketplace-pwkx9\" (UID: \"c433daac-2067-47ed-ba5c-01ae452a511d\") " pod="openshift-marketplace/redhat-marketplace-pwkx9" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.544178 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c433daac-2067-47ed-ba5c-01ae452a511d-catalog-content\") pod \"redhat-marketplace-pwkx9\" (UID: \"c433daac-2067-47ed-ba5c-01ae452a511d\") " pod="openshift-marketplace/redhat-marketplace-pwkx9" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.544691 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c433daac-2067-47ed-ba5c-01ae452a511d-catalog-content\") pod \"redhat-marketplace-pwkx9\" (UID: \"c433daac-2067-47ed-ba5c-01ae452a511d\") " pod="openshift-marketplace/redhat-marketplace-pwkx9" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.544811 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c433daac-2067-47ed-ba5c-01ae452a511d-utilities\") pod \"redhat-marketplace-pwkx9\" (UID: \"c433daac-2067-47ed-ba5c-01ae452a511d\") " pod="openshift-marketplace/redhat-marketplace-pwkx9" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.563562 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7l2w\" (UniqueName: \"kubernetes.io/projected/c433daac-2067-47ed-ba5c-01ae452a511d-kube-api-access-r7l2w\") pod \"redhat-marketplace-pwkx9\" (UID: \"c433daac-2067-47ed-ba5c-01ae452a511d\") " pod="openshift-marketplace/redhat-marketplace-pwkx9" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.689784 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pwkx9" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.966337 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gwx44"] Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.967862 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.970086 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 05:54:30 crc kubenswrapper[4742]: I1205 05:54:30.989220 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gwx44"] Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.053003 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-catalog-content\") pod \"redhat-operators-gwx44\" (UID: \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\") " pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.053083 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q89wr\" (UniqueName: \"kubernetes.io/projected/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-kube-api-access-q89wr\") pod \"redhat-operators-gwx44\" (UID: \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\") " pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.053124 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-utilities\") pod \"redhat-operators-gwx44\" (UID: \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\") " pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.069431 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwkx9"] Dec 05 05:54:31 crc kubenswrapper[4742]: W1205 05:54:31.075722 4742 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc433daac_2067_47ed_ba5c_01ae452a511d.slice/crio-215d4043a88775affa807a5708b5c88eb94219f142a867839e77c8ca58f751b6 WatchSource:0}: Error finding container 215d4043a88775affa807a5708b5c88eb94219f142a867839e77c8ca58f751b6: Status 404 returned error can't find the container with id 215d4043a88775affa807a5708b5c88eb94219f142a867839e77c8ca58f751b6 Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.153914 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q89wr\" (UniqueName: \"kubernetes.io/projected/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-kube-api-access-q89wr\") pod \"redhat-operators-gwx44\" (UID: \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\") " pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.154006 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-utilities\") pod \"redhat-operators-gwx44\" (UID: \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\") " pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.154130 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-catalog-content\") pod \"redhat-operators-gwx44\" (UID: \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\") " pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.154631 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-utilities\") pod \"redhat-operators-gwx44\" (UID: \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\") " pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.154657 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-catalog-content\") pod \"redhat-operators-gwx44\" (UID: \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\") " pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.174438 4742 patch_prober.go:28] interesting pod/router-default-5444994796-nfv6w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:54:31 crc kubenswrapper[4742]: [-]has-synced failed: reason withheld Dec 05 05:54:31 crc kubenswrapper[4742]: [+]process-running ok Dec 05 05:54:31 crc kubenswrapper[4742]: healthz check failed Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.174484 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfv6w" podUID="f2d129d3-f117-492c-a680-a03e1ca560e1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.175142 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q89wr\" (UniqueName: \"kubernetes.io/projected/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-kube-api-access-q89wr\") pod \"redhat-operators-gwx44\" (UID: \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\") " 
pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.283186 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.357019 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.358454 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.358644 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.358894 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.359049 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.363193 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.364561 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.365859 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7kmqp"] Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.366498 4742 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.366932 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.380535 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7kmqp"] Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.397912 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.399105 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.413515 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.461335 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43ebbfe9-74ee-405b-82d8-d4a825a7386d-catalog-content\") pod \"redhat-operators-7kmqp\" (UID: \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\") " pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.461386 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43ebbfe9-74ee-405b-82d8-d4a825a7386d-utilities\") pod \"redhat-operators-7kmqp\" (UID: \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\") " pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.461447 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvdrn\" (UniqueName: \"kubernetes.io/projected/43ebbfe9-74ee-405b-82d8-d4a825a7386d-kube-api-access-hvdrn\") pod \"redhat-operators-7kmqp\" (UID: \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\") " pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.509930 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.509967 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-778cz" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.513067 4742 patch_prober.go:28] interesting pod/console-f9d7485db-778cz container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.19:8443/health\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.513113 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-778cz" podUID="91c75381-2f50-415e-b5c8-e1261be30bbc" containerName="console" probeResult="failure" output="Get \"https://10.217.0.19:8443/health\": dial tcp 10.217.0.19:8443: connect: connection refused" Dec 05 05:54:31 crc 
kubenswrapper[4742]: I1205 05:54:31.534803 4742 patch_prober.go:28] interesting pod/downloads-7954f5f757-gn9cx container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.534847 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-gn9cx" podUID="bfef6735-7572-4411-b37d-b194d84534de" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.534876 4742 patch_prober.go:28] interesting pod/downloads-7954f5f757-gn9cx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.534908 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-gn9cx" podUID="bfef6735-7572-4411-b37d-b194d84534de" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.541370 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gwx44"] Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.554634 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-89q4d" event={"ID":"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67","Type":"ContainerDied","Data":"8271a8b518c94a2c4b43c8bf04bf6af8b77f70aab282be7f508bb77b7659bc76"} Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.554595 4742 generic.go:334] "Generic (PLEG): container finished" podID="d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" containerID="8271a8b518c94a2c4b43c8bf04bf6af8b77f70aab282be7f508bb77b7659bc76" exitCode=0 Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.554829 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-89q4d" event={"ID":"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67","Type":"ContainerStarted","Data":"290a114c945b0a075acbd763cd261322d49ea2a763e35da2d9cad64fb3c4dbd1"} Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.558022 4742 generic.go:334] "Generic (PLEG): container finished" podID="c433daac-2067-47ed-ba5c-01ae452a511d" containerID="a6b4433a8e42085703ea5d8e799db7cbe83bd13b040ee9772c71f338bf3b8e15" exitCode=0 Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.558088 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwkx9" event={"ID":"c433daac-2067-47ed-ba5c-01ae452a511d","Type":"ContainerDied","Data":"a6b4433a8e42085703ea5d8e799db7cbe83bd13b040ee9772c71f338bf3b8e15"} Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.558111 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwkx9" event={"ID":"c433daac-2067-47ed-ba5c-01ae452a511d","Type":"ContainerStarted","Data":"215d4043a88775affa807a5708b5c88eb94219f142a867839e77c8ca58f751b6"} Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.560654 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" 
event={"ID":"e9d19072-f6f9-42da-8b86-5d6bff4b340c","Type":"ContainerStarted","Data":"e28ee2b267b42d0dd0e560f8a11a748df5c6bf5fed2973fcd2b5731f66cd2513"} Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.561282 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.562517 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43ebbfe9-74ee-405b-82d8-d4a825a7386d-catalog-content\") pod \"redhat-operators-7kmqp\" (UID: \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\") " pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.562569 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43ebbfe9-74ee-405b-82d8-d4a825a7386d-utilities\") pod \"redhat-operators-7kmqp\" (UID: \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\") " pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.562653 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvdrn\" (UniqueName: \"kubernetes.io/projected/43ebbfe9-74ee-405b-82d8-d4a825a7386d-kube-api-access-hvdrn\") pod \"redhat-operators-7kmqp\" (UID: \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\") " pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.563934 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43ebbfe9-74ee-405b-82d8-d4a825a7386d-catalog-content\") pod \"redhat-operators-7kmqp\" (UID: \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\") " pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.563965 4742 generic.go:334] "Generic (PLEG): container finished" podID="fbcd85a4-bd70-4d39-9122-17f509e9f2bd" containerID="78d92b2ed523e5e745860addf938fec3c9082abf22407aeeafcc59baa525ae70" exitCode=0 Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.564389 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"fbcd85a4-bd70-4d39-9122-17f509e9f2bd","Type":"ContainerDied","Data":"78d92b2ed523e5e745860addf938fec3c9082abf22407aeeafcc59baa525ae70"} Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.564661 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43ebbfe9-74ee-405b-82d8-d4a825a7386d-utilities\") pod \"redhat-operators-7kmqp\" (UID: \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\") " pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.583919 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-7mhpl" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.600595 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvdrn\" (UniqueName: \"kubernetes.io/projected/43ebbfe9-74ee-405b-82d8-d4a825a7386d-kube-api-access-hvdrn\") pod \"redhat-operators-7kmqp\" (UID: \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\") " pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.600606 4742 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" podStartSLOduration=128.600581693 podStartE2EDuration="2m8.600581693s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:54:31.596665709 +0000 UTC m=+147.508800781" watchObservedRunningTime="2025-12-05 05:54:31.600581693 +0000 UTC m=+147.512716755" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.612724 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.625330 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.643850 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.710195 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.746259 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.746629 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:31 crc kubenswrapper[4742]: I1205 05:54:31.759377 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.058029 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.074198 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgbk5\" (UniqueName: \"kubernetes.io/projected/ebaea921-5d50-4d64-b73e-db0feab77248-kube-api-access-hgbk5\") pod \"ebaea921-5d50-4d64-b73e-db0feab77248\" (UID: \"ebaea921-5d50-4d64-b73e-db0feab77248\") " Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.074254 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ebaea921-5d50-4d64-b73e-db0feab77248-secret-volume\") pod \"ebaea921-5d50-4d64-b73e-db0feab77248\" (UID: \"ebaea921-5d50-4d64-b73e-db0feab77248\") " Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.074277 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ebaea921-5d50-4d64-b73e-db0feab77248-config-volume\") pod \"ebaea921-5d50-4d64-b73e-db0feab77248\" (UID: \"ebaea921-5d50-4d64-b73e-db0feab77248\") " Dec 05 05:54:32 crc kubenswrapper[4742]: W1205 05:54:32.079445 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-72da56ca36a0ae1cb284cea9d5f64bcd3b4090a568610239de19c75bc9b64e73 WatchSource:0}: Error finding container 72da56ca36a0ae1cb284cea9d5f64bcd3b4090a568610239de19c75bc9b64e73: Status 404 returned error can't find the container with id 72da56ca36a0ae1cb284cea9d5f64bcd3b4090a568610239de19c75bc9b64e73 Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.080437 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebaea921-5d50-4d64-b73e-db0feab77248-config-volume" (OuterVolumeSpecName: "config-volume") pod "ebaea921-5d50-4d64-b73e-db0feab77248" (UID: "ebaea921-5d50-4d64-b73e-db0feab77248"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.083532 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebaea921-5d50-4d64-b73e-db0feab77248-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ebaea921-5d50-4d64-b73e-db0feab77248" (UID: "ebaea921-5d50-4d64-b73e-db0feab77248"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.083861 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebaea921-5d50-4d64-b73e-db0feab77248-kube-api-access-hgbk5" (OuterVolumeSpecName: "kube-api-access-hgbk5") pod "ebaea921-5d50-4d64-b73e-db0feab77248" (UID: "ebaea921-5d50-4d64-b73e-db0feab77248"). InnerVolumeSpecName "kube-api-access-hgbk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.171918 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-nfv6w" Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.175636 4742 patch_prober.go:28] interesting pod/router-default-5444994796-nfv6w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:54:32 crc kubenswrapper[4742]: [-]has-synced failed: reason withheld Dec 05 05:54:32 crc kubenswrapper[4742]: [+]process-running ok Dec 05 05:54:32 crc kubenswrapper[4742]: healthz check failed Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.175725 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfv6w" podUID="f2d129d3-f117-492c-a680-a03e1ca560e1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.176148 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgbk5\" (UniqueName: \"kubernetes.io/projected/ebaea921-5d50-4d64-b73e-db0feab77248-kube-api-access-hgbk5\") on node \"crc\" DevicePath \"\"" Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.176183 4742 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ebaea921-5d50-4d64-b73e-db0feab77248-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.176198 4742 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ebaea921-5d50-4d64-b73e-db0feab77248-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.247202 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7kmqp"] Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.405816 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.489257 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.594603 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"eed4994587943a30087bfa260f742211538cf71d0c8a63e8d08093e04b553ba2"} Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.594683 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"83b7a35342dd23c478d2a81ae9e6fdce32cd10210b63b67cbefc0297189824b0"} Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.601482 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"a034a3644ac3f0a1bc6ae7b78353df08f71d403bbcff3b214c09939632701be1"} Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.630022 4742 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7kmqp" event={"ID":"43ebbfe9-74ee-405b-82d8-d4a825a7386d","Type":"ContainerStarted","Data":"2c230cc833d5787dbcae913aa9baf231c2b77833062e8f5e2afaad199b9b3080"} Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.642202 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" event={"ID":"ebaea921-5d50-4d64-b73e-db0feab77248","Type":"ContainerDied","Data":"e46cf99be5f2930097fe188fbff2791c2b84b189e451666141baa180b639c672"} Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.642245 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e46cf99be5f2930097fe188fbff2791c2b84b189e451666141baa180b639c672" Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.642320 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz" Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.679887 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"72da56ca36a0ae1cb284cea9d5f64bcd3b4090a568610239de19c75bc9b64e73"} Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.687795 4742 generic.go:334] "Generic (PLEG): container finished" podID="04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" containerID="29fd0f4fdb9d682e2f86d4c9a064b30b672586ac8c98af56ef1ed82dbc8e97b9" exitCode=0 Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.690629 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwx44" event={"ID":"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c","Type":"ContainerDied","Data":"29fd0f4fdb9d682e2f86d4c9a064b30b672586ac8c98af56ef1ed82dbc8e97b9"} Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.690674 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwx44" event={"ID":"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c","Type":"ContainerStarted","Data":"dec8e0c37f74bbcccbfd618083c96e0c689aeb27e761245802a8ab61ceaa4829"} Dec 05 05:54:32 crc kubenswrapper[4742]: I1205 05:54:32.698186 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-wpv7n" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.094233 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.175720 4742 patch_prober.go:28] interesting pod/router-default-5444994796-nfv6w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 05:54:33 crc kubenswrapper[4742]: [-]has-synced failed: reason withheld Dec 05 05:54:33 crc kubenswrapper[4742]: [+]process-running ok Dec 05 05:54:33 crc kubenswrapper[4742]: healthz check failed Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.176187 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfv6w" podUID="f2d129d3-f117-492c-a680-a03e1ca560e1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.214837 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fbcd85a4-bd70-4d39-9122-17f509e9f2bd-kubelet-dir\") pod \"fbcd85a4-bd70-4d39-9122-17f509e9f2bd\" (UID: \"fbcd85a4-bd70-4d39-9122-17f509e9f2bd\") " Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.215106 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fbcd85a4-bd70-4d39-9122-17f509e9f2bd-kube-api-access\") pod \"fbcd85a4-bd70-4d39-9122-17f509e9f2bd\" (UID: \"fbcd85a4-bd70-4d39-9122-17f509e9f2bd\") " Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.215654 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fbcd85a4-bd70-4d39-9122-17f509e9f2bd-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "fbcd85a4-bd70-4d39-9122-17f509e9f2bd" (UID: "fbcd85a4-bd70-4d39-9122-17f509e9f2bd"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.220909 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbcd85a4-bd70-4d39-9122-17f509e9f2bd-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "fbcd85a4-bd70-4d39-9122-17f509e9f2bd" (UID: "fbcd85a4-bd70-4d39-9122-17f509e9f2bd"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.317192 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fbcd85a4-bd70-4d39-9122-17f509e9f2bd-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.317456 4742 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fbcd85a4-bd70-4d39-9122-17f509e9f2bd-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.627241 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 05:54:33 crc kubenswrapper[4742]: E1205 05:54:33.627684 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbcd85a4-bd70-4d39-9122-17f509e9f2bd" containerName="pruner" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.627701 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbcd85a4-bd70-4d39-9122-17f509e9f2bd" containerName="pruner" Dec 05 05:54:33 crc kubenswrapper[4742]: E1205 05:54:33.627720 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebaea921-5d50-4d64-b73e-db0feab77248" containerName="collect-profiles" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.627727 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebaea921-5d50-4d64-b73e-db0feab77248" containerName="collect-profiles" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.627875 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbcd85a4-bd70-4d39-9122-17f509e9f2bd" containerName="pruner" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.627898 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebaea921-5d50-4d64-b73e-db0feab77248" containerName="collect-profiles" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.628525 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.631639 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.632775 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.636382 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.703158 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"5e4ae367e9916d3280a5c6ea289c9a8103d493292717bd18121c0b8f7bcc94d9"} Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.706807 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"fbcd85a4-bd70-4d39-9122-17f509e9f2bd","Type":"ContainerDied","Data":"b55f72feb50f0aefc92c275c16cf60e1a0b11c4583e6d108f7d5684b620dec9a"} Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.706838 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b55f72feb50f0aefc92c275c16cf60e1a0b11c4583e6d108f7d5684b620dec9a" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.706915 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.723250 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"eca6d500fbf58a64c6f1038c5cbce8f07f3c323488242d6b09ae90519a72aebe"} Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.723764 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.728499 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83e099a2-402d-4039-9800-5a75c631f40c-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"83e099a2-402d-4039-9800-5a75c631f40c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.728583 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83e099a2-402d-4039-9800-5a75c631f40c-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"83e099a2-402d-4039-9800-5a75c631f40c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.734961 4742 generic.go:334] "Generic (PLEG): container finished" podID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" containerID="c97eb8a6bf8096eace358f5c7d59e5683de6a6ef775d62fc11be7f873924e3a9" exitCode=0 Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.735979 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7kmqp" 
event={"ID":"43ebbfe9-74ee-405b-82d8-d4a825a7386d","Type":"ContainerDied","Data":"c97eb8a6bf8096eace358f5c7d59e5683de6a6ef775d62fc11be7f873924e3a9"} Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.830223 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83e099a2-402d-4039-9800-5a75c631f40c-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"83e099a2-402d-4039-9800-5a75c631f40c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.830748 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83e099a2-402d-4039-9800-5a75c631f40c-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"83e099a2-402d-4039-9800-5a75c631f40c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.830835 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83e099a2-402d-4039-9800-5a75c631f40c-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"83e099a2-402d-4039-9800-5a75c631f40c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.863806 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83e099a2-402d-4039-9800-5a75c631f40c-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"83e099a2-402d-4039-9800-5a75c631f40c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 05:54:33 crc kubenswrapper[4742]: I1205 05:54:33.953641 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 05:54:34 crc kubenswrapper[4742]: I1205 05:54:34.174766 4742 patch_prober.go:28] interesting pod/router-default-5444994796-nfv6w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 05:54:34 crc kubenswrapper[4742]: [-]has-synced failed: reason withheld
Dec 05 05:54:34 crc kubenswrapper[4742]: [+]process-running ok
Dec 05 05:54:34 crc kubenswrapper[4742]: healthz check failed
Dec 05 05:54:34 crc kubenswrapper[4742]: I1205 05:54:34.175111 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfv6w" podUID="f2d129d3-f117-492c-a680-a03e1ca560e1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 05:54:34 crc kubenswrapper[4742]: I1205 05:54:34.584071 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 05 05:54:34 crc kubenswrapper[4742]: W1205 05:54:34.656948 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod83e099a2_402d_4039_9800_5a75c631f40c.slice/crio-facc8833591d70a6946473eea78124e2b96f642bdc2eb105023f4974ec80b44c WatchSource:0}: Error finding container facc8833591d70a6946473eea78124e2b96f642bdc2eb105023f4974ec80b44c: Status 404 returned error can't find the container with id facc8833591d70a6946473eea78124e2b96f642bdc2eb105023f4974ec80b44c
Dec 05 05:54:34 crc kubenswrapper[4742]: I1205 05:54:34.767851 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"83e099a2-402d-4039-9800-5a75c631f40c","Type":"ContainerStarted","Data":"facc8833591d70a6946473eea78124e2b96f642bdc2eb105023f4974ec80b44c"}
Dec 05 05:54:35 crc kubenswrapper[4742]: I1205 05:54:35.174574 4742 patch_prober.go:28] interesting pod/router-default-5444994796-nfv6w container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 05:54:35 crc kubenswrapper[4742]: [-]has-synced failed: reason withheld
Dec 05 05:54:35 crc kubenswrapper[4742]: [+]process-running ok
Dec 05 05:54:35 crc kubenswrapper[4742]: healthz check failed
Dec 05 05:54:35 crc kubenswrapper[4742]: I1205 05:54:35.174666 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nfv6w" podUID="f2d129d3-f117-492c-a680-a03e1ca560e1" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 05:54:35 crc kubenswrapper[4742]: I1205 05:54:35.784472 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"83e099a2-402d-4039-9800-5a75c631f40c","Type":"ContainerStarted","Data":"55aefad370da76521e9188194d3aadbe01af5caa2bfd8ebd1f16bcde58669b98"}
Dec 05 05:54:36 crc kubenswrapper[4742]: I1205 05:54:36.175041 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-nfv6w"
Dec 05 05:54:36 crc kubenswrapper[4742]: I1205 05:54:36.177633 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-nfv6w"
Dec 05 05:54:36 crc kubenswrapper[4742]: I1205 05:54:36.805436 4742 generic.go:334] "Generic (PLEG): container finished" podID="83e099a2-402d-4039-9800-5a75c631f40c" containerID="55aefad370da76521e9188194d3aadbe01af5caa2bfd8ebd1f16bcde58669b98" exitCode=0
Dec 05 05:54:36 crc kubenswrapper[4742]: I1205 05:54:36.805546 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"83e099a2-402d-4039-9800-5a75c631f40c","Type":"ContainerDied","Data":"55aefad370da76521e9188194d3aadbe01af5caa2bfd8ebd1f16bcde58669b98"}
Dec 05 05:54:37 crc kubenswrapper[4742]: I1205 05:54:37.418504 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-rd6kc"
Dec 05 05:54:41 crc kubenswrapper[4742]: I1205 05:54:41.538795 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-gn9cx"
Dec 05 05:54:41 crc kubenswrapper[4742]: I1205 05:54:41.571352 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-778cz"
Dec 05 05:54:41 crc kubenswrapper[4742]: I1205 05:54:41.576863 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-778cz"
Dec 05 05:54:44 crc kubenswrapper[4742]: I1205 05:54:44.272605 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 05:54:44 crc kubenswrapper[4742]: I1205 05:54:44.426160 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83e099a2-402d-4039-9800-5a75c631f40c-kubelet-dir\") pod \"83e099a2-402d-4039-9800-5a75c631f40c\" (UID: \"83e099a2-402d-4039-9800-5a75c631f40c\") "
Dec 05 05:54:44 crc kubenswrapper[4742]: I1205 05:54:44.426228 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83e099a2-402d-4039-9800-5a75c631f40c-kube-api-access\") pod \"83e099a2-402d-4039-9800-5a75c631f40c\" (UID: \"83e099a2-402d-4039-9800-5a75c631f40c\") "
Dec 05 05:54:44 crc kubenswrapper[4742]: I1205 05:54:44.426304 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/83e099a2-402d-4039-9800-5a75c631f40c-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "83e099a2-402d-4039-9800-5a75c631f40c" (UID: "83e099a2-402d-4039-9800-5a75c631f40c"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 05:54:44 crc kubenswrapper[4742]: I1205 05:54:44.426508 4742 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83e099a2-402d-4039-9800-5a75c631f40c-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 05 05:54:44 crc kubenswrapper[4742]: I1205 05:54:44.431311 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83e099a2-402d-4039-9800-5a75c631f40c-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "83e099a2-402d-4039-9800-5a75c631f40c" (UID: "83e099a2-402d-4039-9800-5a75c631f40c"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:54:44 crc kubenswrapper[4742]: I1205 05:54:44.528834 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83e099a2-402d-4039-9800-5a75c631f40c-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 05:54:44 crc kubenswrapper[4742]: I1205 05:54:44.872774 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"83e099a2-402d-4039-9800-5a75c631f40c","Type":"ContainerDied","Data":"facc8833591d70a6946473eea78124e2b96f642bdc2eb105023f4974ec80b44c"}
Dec 05 05:54:44 crc kubenswrapper[4742]: I1205 05:54:44.872830 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="facc8833591d70a6946473eea78124e2b96f642bdc2eb105023f4974ec80b44c"
Dec 05 05:54:44 crc kubenswrapper[4742]: I1205 05:54:44.872844 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 05:54:45 crc kubenswrapper[4742]: I1205 05:54:45.947341 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs\") pod \"network-metrics-daemon-pbtb4\" (UID: \"b69352e1-2d48-4211-83e1-25d09fff9d3c\") " pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:54:45 crc kubenswrapper[4742]: I1205 05:54:45.952764 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b69352e1-2d48-4211-83e1-25d09fff9d3c-metrics-certs\") pod \"network-metrics-daemon-pbtb4\" (UID: \"b69352e1-2d48-4211-83e1-25d09fff9d3c\") " pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:54:46 crc kubenswrapper[4742]: I1205 05:54:46.104323 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pbtb4"
Dec 05 05:54:46 crc kubenswrapper[4742]: I1205 05:54:46.671441 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 05:54:46 crc kubenswrapper[4742]: I1205 05:54:46.671516 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 05:54:50 crc kubenswrapper[4742]: I1205 05:54:50.075415 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf"
Dec 05 05:55:03 crc kubenswrapper[4742]: I1205 05:55:03.305305 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-z8tpl"
Dec 05 05:55:08 crc kubenswrapper[4742]: I1205 05:55:08.225788 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 05 05:55:08 crc kubenswrapper[4742]: E1205 05:55:08.227356 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83e099a2-402d-4039-9800-5a75c631f40c" containerName="pruner"
Dec 05 05:55:08 crc kubenswrapper[4742]: I1205 05:55:08.227387 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="83e099a2-402d-4039-9800-5a75c631f40c" containerName="pruner"
Dec 05 05:55:08 crc kubenswrapper[4742]: I1205 05:55:08.227616 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="83e099a2-402d-4039-9800-5a75c631f40c" containerName="pruner"
Dec 05 05:55:08 crc kubenswrapper[4742]: I1205 05:55:08.228495 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:55:08 crc kubenswrapper[4742]: I1205 05:55:08.232996 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 05 05:55:08 crc kubenswrapper[4742]: I1205 05:55:08.234743 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 05 05:55:08 crc kubenswrapper[4742]: I1205 05:55:08.252617 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 05 05:55:08 crc kubenswrapper[4742]: I1205 05:55:08.342188 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/711aa3ef-6d85-4813-8182-facf9c865c4e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"711aa3ef-6d85-4813-8182-facf9c865c4e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:55:08 crc kubenswrapper[4742]: I1205 05:55:08.342239 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/711aa3ef-6d85-4813-8182-facf9c865c4e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"711aa3ef-6d85-4813-8182-facf9c865c4e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:55:08 crc kubenswrapper[4742]: I1205 05:55:08.443749 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/711aa3ef-6d85-4813-8182-facf9c865c4e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"711aa3ef-6d85-4813-8182-facf9c865c4e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:55:08 crc kubenswrapper[4742]: I1205 05:55:08.443820 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/711aa3ef-6d85-4813-8182-facf9c865c4e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"711aa3ef-6d85-4813-8182-facf9c865c4e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:55:08 crc kubenswrapper[4742]: I1205 05:55:08.443955 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/711aa3ef-6d85-4813-8182-facf9c865c4e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"711aa3ef-6d85-4813-8182-facf9c865c4e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:55:08 crc kubenswrapper[4742]: I1205 05:55:08.480459 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/711aa3ef-6d85-4813-8182-facf9c865c4e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"711aa3ef-6d85-4813-8182-facf9c865c4e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:55:08 crc kubenswrapper[4742]: I1205 05:55:08.604759 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:55:11 crc kubenswrapper[4742]: I1205 05:55:11.618509 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 05:55:11 crc kubenswrapper[4742]: I1205 05:55:11.720509 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-pbtb4"]
Dec 05 05:55:13 crc kubenswrapper[4742]: I1205 05:55:13.621671 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 05 05:55:13 crc kubenswrapper[4742]: I1205 05:55:13.626425 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:55:13 crc kubenswrapper[4742]: I1205 05:55:13.634103 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 05 05:55:13 crc kubenswrapper[4742]: I1205 05:55:13.727754 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7bb19c95-d3a5-4418-8787-82e424244073-var-lock\") pod \"installer-9-crc\" (UID: \"7bb19c95-d3a5-4418-8787-82e424244073\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:55:13 crc kubenswrapper[4742]: I1205 05:55:13.727849 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7bb19c95-d3a5-4418-8787-82e424244073-kube-api-access\") pod \"installer-9-crc\" (UID: \"7bb19c95-d3a5-4418-8787-82e424244073\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:55:13 crc kubenswrapper[4742]: I1205 05:55:13.727915 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7bb19c95-d3a5-4418-8787-82e424244073-kubelet-dir\") pod \"installer-9-crc\" (UID: \"7bb19c95-d3a5-4418-8787-82e424244073\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:55:13 crc kubenswrapper[4742]: I1205 05:55:13.829576 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7bb19c95-d3a5-4418-8787-82e424244073-kubelet-dir\") pod \"installer-9-crc\" (UID: \"7bb19c95-d3a5-4418-8787-82e424244073\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:55:13 crc kubenswrapper[4742]: I1205 05:55:13.829761 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7bb19c95-d3a5-4418-8787-82e424244073-var-lock\") pod \"installer-9-crc\" (UID: \"7bb19c95-d3a5-4418-8787-82e424244073\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:55:13 crc kubenswrapper[4742]: I1205 05:55:13.829805 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7bb19c95-d3a5-4418-8787-82e424244073-kube-api-access\") pod \"installer-9-crc\" (UID: \"7bb19c95-d3a5-4418-8787-82e424244073\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:55:13 crc kubenswrapper[4742]: I1205 05:55:13.829959 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7bb19c95-d3a5-4418-8787-82e424244073-var-lock\") pod \"installer-9-crc\" (UID: \"7bb19c95-d3a5-4418-8787-82e424244073\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:55:13 crc kubenswrapper[4742]: I1205 05:55:13.829985 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7bb19c95-d3a5-4418-8787-82e424244073-kubelet-dir\") pod \"installer-9-crc\" (UID: \"7bb19c95-d3a5-4418-8787-82e424244073\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:55:13 crc kubenswrapper[4742]: I1205 05:55:13.869049 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7bb19c95-d3a5-4418-8787-82e424244073-kube-api-access\") pod \"installer-9-crc\" (UID: \"7bb19c95-d3a5-4418-8787-82e424244073\") " pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:55:13 crc kubenswrapper[4742]: I1205 05:55:13.966245 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 05 05:55:16 crc kubenswrapper[4742]: I1205 05:55:16.670579 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 05:55:16 crc kubenswrapper[4742]: I1205 05:55:16.670983 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 05:55:23 crc kubenswrapper[4742]: E1205 05:55:23.718710 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Dec 05 05:55:23 crc kubenswrapper[4742]: E1205 05:55:23.719349 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q89wr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-gwx44_openshift-marketplace(04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 05:55:23 crc kubenswrapper[4742]: E1205 05:55:23.720513 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-gwx44" podUID="04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c"
Dec 05 05:55:25 crc kubenswrapper[4742]: E1205 05:55:25.358475 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 05 05:55:25 crc kubenswrapper[4742]: E1205 05:55:25.358988 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hn2bp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-dpnl4_openshift-marketplace(8eed6205-7703-433f-83cf-d7b51867e5ee): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 05:55:25 crc kubenswrapper[4742]: E1205 05:55:25.361823 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-dpnl4" podUID="8eed6205-7703-433f-83cf-d7b51867e5ee"
Dec 05 05:55:25 crc kubenswrapper[4742]: E1205 05:55:25.552619 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18"
Dec 05 05:55:25 crc kubenswrapper[4742]: E1205 05:55:25.552891 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hvdrn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-7kmqp_openshift-marketplace(43ebbfe9-74ee-405b-82d8-d4a825a7386d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 05:55:25 crc kubenswrapper[4742]: E1205 05:55:25.554841 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-7kmqp" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d"
Dec 05 05:55:28 crc kubenswrapper[4742]: E1205 05:55:28.513897 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 05 05:55:28 crc kubenswrapper[4742]: E1205 05:55:28.514491 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r7l2w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-pwkx9_openshift-marketplace(c433daac-2067-47ed-ba5c-01ae452a511d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 05:55:28 crc kubenswrapper[4742]: E1205 05:55:28.516296 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-pwkx9" podUID="c433daac-2067-47ed-ba5c-01ae452a511d"
Dec 05 05:55:28 crc kubenswrapper[4742]: E1205 05:55:28.553076 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18"
Dec 05 05:55:28 crc kubenswrapper[4742]: E1205 05:55:28.553236 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cmlms,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-cbhtr_openshift-marketplace(7fc280e3-5842-4844-ad57-d3526ceeb957): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 05:55:28 crc kubenswrapper[4742]: E1205 05:55:28.554789 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-cbhtr" podUID="7fc280e3-5842-4844-ad57-d3526ceeb957"
Dec 05 05:55:30 crc kubenswrapper[4742]: E1205 05:55:30.339335 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-gwx44" podUID="04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c"
Dec 05 05:55:30 crc kubenswrapper[4742]: E1205 05:55:30.339812 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-7kmqp" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d"
Dec 05 05:55:30 crc kubenswrapper[4742]: E1205 05:55:30.339853 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-dpnl4" podUID="8eed6205-7703-433f-83cf-d7b51867e5ee"
Dec 05 05:55:30 crc kubenswrapper[4742]: E1205 05:55:30.339957 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-cbhtr" podUID="7fc280e3-5842-4844-ad57-d3526ceeb957"
Dec 05 05:55:30 crc kubenswrapper[4742]: E1205 05:55:30.340000 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-pwkx9" podUID="c433daac-2067-47ed-ba5c-01ae452a511d"
Dec 05 05:55:30 crc kubenswrapper[4742]: E1205 05:55:30.412177 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18"
Dec 05 05:55:30 crc kubenswrapper[4742]: E1205 05:55:30.412496 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pnmlc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-89q4d_openshift-marketplace(d8ed5ac0-ecdc-4f4f-a13b-223289da1f67): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 05:55:30 crc kubenswrapper[4742]: E1205 05:55:30.413660 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-89q4d" podUID="d8ed5ac0-ecdc-4f4f-a13b-223289da1f67"
Dec 05 05:55:30 crc kubenswrapper[4742]: E1205 05:55:30.439542 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 05 05:55:30 crc kubenswrapper[4742]: E1205 05:55:30.439712 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rzhsr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-99jn8_openshift-marketplace(e64b157e-a58d-4dfd-8d97-be73077a1e25): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 05:55:30 crc kubenswrapper[4742]: E1205 05:55:30.440870 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-99jn8" podUID="e64b157e-a58d-4dfd-8d97-be73077a1e25"
Dec 05 05:55:30 crc kubenswrapper[4742]: E1205 05:55:30.441239 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18"
Dec 05 05:55:30 crc kubenswrapper[4742]: E1205 05:55:30.441332 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pgz9p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-kjh59_openshift-marketplace(ff4c4d35-276e-47e9-8b12-76361e2005bf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 05:55:30 crc kubenswrapper[4742]: E1205 05:55:30.442460 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-kjh59" podUID="ff4c4d35-276e-47e9-8b12-76361e2005bf"
Dec 05 05:55:30 crc kubenswrapper[4742]: I1205 05:55:30.545672 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"]
Dec 05 05:55:30 crc kubenswrapper[4742]: I1205 05:55:30.796787 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"]
Dec 05 05:55:31 crc kubenswrapper[4742]: I1205 05:55:31.216513 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" event={"ID":"b69352e1-2d48-4211-83e1-25d09fff9d3c","Type":"ContainerStarted","Data":"834189bd0b053d793e062829d554c2aa35ad2673e2a0be462b67898e01aec54c"}
Dec 05 05:55:31 crc kubenswrapper[4742]: I1205 05:55:31.216809 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" event={"ID":"b69352e1-2d48-4211-83e1-25d09fff9d3c","Type":"ContainerStarted","Data":"c05c0c75c87839adac30765291a55d7ca4aa813812a6c8b7d7d78845b1d69dfa"}
Dec 05 05:55:31 crc kubenswrapper[4742]: I1205 05:55:31.216819 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pbtb4" event={"ID":"b69352e1-2d48-4211-83e1-25d09fff9d3c","Type":"ContainerStarted","Data":"2de7c9dfe79eb5c6a046c8e0792f960dc457744317f2b3dce42c729e39cb32ce"}
Dec 05 05:55:31 crc kubenswrapper[4742]: I1205 05:55:31.218129 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"7bb19c95-d3a5-4418-8787-82e424244073","Type":"ContainerStarted","Data":"7de69cb7c452679d77d80eb05947cc6ce3fdc1fbe650c1f12bce8172f71f53d5"}
Dec 05 05:55:31 crc kubenswrapper[4742]: I1205 05:55:31.218188 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"7bb19c95-d3a5-4418-8787-82e424244073","Type":"ContainerStarted","Data":"86e001fcbfc81c7559e1b2b18d5a51572d3256c2d32092d8506114262aeb605a"}
Dec 05 05:55:31 crc kubenswrapper[4742]: I1205 05:55:31.219740 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"711aa3ef-6d85-4813-8182-facf9c865c4e","Type":"ContainerStarted","Data":"92b3d7d464286ff5bd00e3f4b2dc29e352e900df3d21405dda663051f9de0e13"}
Dec 05 05:55:31 crc kubenswrapper[4742]: I1205 05:55:31.219781 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"711aa3ef-6d85-4813-8182-facf9c865c4e","Type":"ContainerStarted","Data":"e615c6da8f7e4feafd47b62bdba9aeb4e032fc694f470625207f347aefeb0e49"}
Dec 05 05:55:31 crc kubenswrapper[4742]: E1205 05:55:31.220887 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-kjh59" podUID="ff4c4d35-276e-47e9-8b12-76361e2005bf"
Dec 05 05:55:31 crc kubenswrapper[4742]: E1205 05:55:31.221648 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-89q4d" podUID="d8ed5ac0-ecdc-4f4f-a13b-223289da1f67"
Dec 05 05:55:31 crc kubenswrapper[4742]: E1205 05:55:31.221682 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-99jn8" podUID="e64b157e-a58d-4dfd-8d97-be73077a1e25"
Dec 05 05:55:31 crc kubenswrapper[4742]: I1205 05:55:31.232543 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-pbtb4" podStartSLOduration=188.232027863 podStartE2EDuration="3m8.232027863s" podCreationTimestamp="2025-12-05 05:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:55:31.228899625 +0000 UTC m=+207.141034737" watchObservedRunningTime="2025-12-05 05:55:31.232027863 +0000 UTC m=+207.144162925"
Dec 05 05:55:31 crc kubenswrapper[4742]: I1205 05:55:31.281131 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=18.281116429 podStartE2EDuration="18.281116429s" podCreationTimestamp="2025-12-05 05:55:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:55:31.279070911 +0000 UTC m=+207.191205993" watchObservedRunningTime="2025-12-05 05:55:31.281116429 +0000 UTC m=+207.193251491"
Dec 05 05:55:31 crc kubenswrapper[4742]: I1205 05:55:31.313800 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=23.313783801 podStartE2EDuration="23.313783801s" podCreationTimestamp="2025-12-05 05:55:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:55:31.312897646 +0000 UTC m=+207.225032708" watchObservedRunningTime="2025-12-05 05:55:31.313783801 +0000 UTC m=+207.225918863"
Dec 05 05:55:32 crc kubenswrapper[4742]: I1205 05:55:32.226523 4742 generic.go:334] "Generic (PLEG): container finished" podID="711aa3ef-6d85-4813-8182-facf9c865c4e" containerID="92b3d7d464286ff5bd00e3f4b2dc29e352e900df3d21405dda663051f9de0e13" exitCode=0
Dec 05 05:55:32 crc kubenswrapper[4742]: I1205 05:55:32.226626 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"711aa3ef-6d85-4813-8182-facf9c865c4e","Type":"ContainerDied","Data":"92b3d7d464286ff5bd00e3f4b2dc29e352e900df3d21405dda663051f9de0e13"}
Dec 05 05:55:33 crc kubenswrapper[4742]: I1205 05:55:33.502797 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:55:33 crc kubenswrapper[4742]: I1205 05:55:33.655384 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/711aa3ef-6d85-4813-8182-facf9c865c4e-kubelet-dir\") pod \"711aa3ef-6d85-4813-8182-facf9c865c4e\" (UID: \"711aa3ef-6d85-4813-8182-facf9c865c4e\") "
Dec 05 05:55:33 crc kubenswrapper[4742]: I1205 05:55:33.655450 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/711aa3ef-6d85-4813-8182-facf9c865c4e-kube-api-access\") pod \"711aa3ef-6d85-4813-8182-facf9c865c4e\" (UID: \"711aa3ef-6d85-4813-8182-facf9c865c4e\") "
Dec 05 05:55:33 crc kubenswrapper[4742]: I1205 05:55:33.655518 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/711aa3ef-6d85-4813-8182-facf9c865c4e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "711aa3ef-6d85-4813-8182-facf9c865c4e" (UID: "711aa3ef-6d85-4813-8182-facf9c865c4e"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 05:55:33 crc kubenswrapper[4742]: I1205 05:55:33.655752 4742 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/711aa3ef-6d85-4813-8182-facf9c865c4e-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 05 05:55:33 crc kubenswrapper[4742]: I1205 05:55:33.664268 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/711aa3ef-6d85-4813-8182-facf9c865c4e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "711aa3ef-6d85-4813-8182-facf9c865c4e" (UID: "711aa3ef-6d85-4813-8182-facf9c865c4e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:55:33 crc kubenswrapper[4742]: I1205 05:55:33.756898 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/711aa3ef-6d85-4813-8182-facf9c865c4e-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 05:55:34 crc kubenswrapper[4742]: I1205 05:55:34.244840 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"711aa3ef-6d85-4813-8182-facf9c865c4e","Type":"ContainerDied","Data":"e615c6da8f7e4feafd47b62bdba9aeb4e032fc694f470625207f347aefeb0e49"}
Dec 05 05:55:34 crc kubenswrapper[4742]: I1205 05:55:34.244884 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e615c6da8f7e4feafd47b62bdba9aeb4e032fc694f470625207f347aefeb0e49"
Dec 05 05:55:34 crc kubenswrapper[4742]: I1205 05:55:34.244885 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc"
Dec 05 05:55:46 crc kubenswrapper[4742]: I1205 05:55:46.670826 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 05:55:46 crc kubenswrapper[4742]: I1205 05:55:46.671480 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 05:55:46 crc kubenswrapper[4742]: I1205 05:55:46.671530 4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw"
Dec 05 05:55:46 crc kubenswrapper[4742]: I1205 05:55:46.672157 4742 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 05:55:46 crc kubenswrapper[4742]: I1205 05:55:46.672254 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4" gracePeriod=600
Dec 05 05:55:47 crc kubenswrapper[4742]: I1205 05:55:47.325422 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4" exitCode=0
Dec 05 05:55:47 crc kubenswrapper[4742]: I1205 05:55:47.325465 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4"}
Dec 05 05:55:49 crc kubenswrapper[4742]: I1205 05:55:49.338193 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"dc2a5e1e92e2a0c25ac27056156677176ab47a63f4ca68a05a9db44f9bedc61a"}
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.356592 4742 generic.go:334] "Generic (PLEG): container finished" podID="c433daac-2067-47ed-ba5c-01ae452a511d" containerID="0a65ea4819366ac4e5eec43564d17484eae47982cb5e36188d43e6a5e12f7105" exitCode=0
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.356733 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwkx9" event={"ID":"c433daac-2067-47ed-ba5c-01ae452a511d","Type":"ContainerDied","Data":"0a65ea4819366ac4e5eec43564d17484eae47982cb5e36188d43e6a5e12f7105"}
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.359516 4742 generic.go:334] "Generic (PLEG): container finished" podID="8eed6205-7703-433f-83cf-d7b51867e5ee" containerID="f566e5722527c38bbaebc7ebba10bdb1da9b2a042fce81eafe2fbed1b6499f8e" exitCode=0
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.359636 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dpnl4" event={"ID":"8eed6205-7703-433f-83cf-d7b51867e5ee","Type":"ContainerDied","Data":"f566e5722527c38bbaebc7ebba10bdb1da9b2a042fce81eafe2fbed1b6499f8e"}
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.376379 4742 generic.go:334] "Generic (PLEG): container finished" podID="7fc280e3-5842-4844-ad57-d3526ceeb957" containerID="a9885554ef9cece59101a208efc3c512f303e47827477b1f1f8a3e3a6c1d43d1" exitCode=0
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.376514 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cbhtr" event={"ID":"7fc280e3-5842-4844-ad57-d3526ceeb957","Type":"ContainerDied","Data":"a9885554ef9cece59101a208efc3c512f303e47827477b1f1f8a3e3a6c1d43d1"}
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.380388 4742 generic.go:334] "Generic (PLEG): container finished" podID="04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" containerID="e36703c20bafcf893c5fa079003def09702fc6b0c65171a262ec9b5b9fd3fbc8" exitCode=0
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.380506 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwx44" event={"ID":"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c","Type":"ContainerDied","Data":"e36703c20bafcf893c5fa079003def09702fc6b0c65171a262ec9b5b9fd3fbc8"}
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.390576 4742 generic.go:334] "Generic (PLEG): container finished" podID="ff4c4d35-276e-47e9-8b12-76361e2005bf" containerID="8eb92da410396a44be153a27aff4ba6fd67851ca7f10e2a41c7943fab1729219" exitCode=0
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.390654 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kjh59" event={"ID":"ff4c4d35-276e-47e9-8b12-76361e2005bf","Type":"ContainerDied","Data":"8eb92da410396a44be153a27aff4ba6fd67851ca7f10e2a41c7943fab1729219"}
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.393682 4742 generic.go:334] "Generic (PLEG): container finished" podID="e64b157e-a58d-4dfd-8d97-be73077a1e25" containerID="c4877f8a5f108a4a43d0be948e1f3b063797add949662733880b8af6945842d2" exitCode=0
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.393743 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-99jn8" event={"ID":"e64b157e-a58d-4dfd-8d97-be73077a1e25","Type":"ContainerDied","Data":"c4877f8a5f108a4a43d0be948e1f3b063797add949662733880b8af6945842d2"}
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.400243 4742 generic.go:334] "Generic (PLEG): container finished" podID="d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" containerID="d6695e144be5489e2f6725bfd4af346873154e90293f75d83b786b32b7f10e82" exitCode=0
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.400326 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-89q4d" event={"ID":"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67","Type":"ContainerDied","Data":"d6695e144be5489e2f6725bfd4af346873154e90293f75d83b786b32b7f10e82"}
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.404875 4742 generic.go:334] "Generic (PLEG): container finished" podID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" containerID="72b28a96e1b25346d757d10571d90d724b972cec2a76a5d52f9c1c0b268208a3" exitCode=0
Dec 05 05:55:51 crc kubenswrapper[4742]: I1205 05:55:51.404927 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7kmqp" event={"ID":"43ebbfe9-74ee-405b-82d8-d4a825a7386d","Type":"ContainerDied","Data":"72b28a96e1b25346d757d10571d90d724b972cec2a76a5d52f9c1c0b268208a3"}
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.412883 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kjh59" event={"ID":"ff4c4d35-276e-47e9-8b12-76361e2005bf","Type":"ContainerStarted","Data":"6f0f0322a7bac20ced74a7b9f9dab148eaa5ec992b9a9b5d14b70312070e2260"}
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.414717 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7kmqp" event={"ID":"43ebbfe9-74ee-405b-82d8-d4a825a7386d","Type":"ContainerStarted","Data":"813204be18af991f7e5b5ee544c09563a2e889df89d6c7357adbbbaf4e126818"}
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.416304 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwkx9" event={"ID":"c433daac-2067-47ed-ba5c-01ae452a511d","Type":"ContainerStarted","Data":"e3d9047fddaabf87292f2c6b8d57fffa7f04e6808c114553c0d6d1841d8a673c"}
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.417873 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dpnl4" event={"ID":"8eed6205-7703-433f-83cf-d7b51867e5ee","Type":"ContainerStarted","Data":"deca516b9100434de9391d51109ccee090c931ca4c65fc0fb55194b6b2543c51"}
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.419628 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cbhtr" event={"ID":"7fc280e3-5842-4844-ad57-d3526ceeb957","Type":"ContainerStarted","Data":"ffa77e5cdd938c92d9b6cb9398a478e149da7fb307e70f21fc01fdb9b33adc2e"}
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.421305 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwx44" event={"ID":"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c","Type":"ContainerStarted","Data":"69abf936a854fbec80f87a64444ddeed3040064c9de7a164e6ad063408d1eee5"}
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.423503 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-99jn8" event={"ID":"e64b157e-a58d-4dfd-8d97-be73077a1e25","Type":"ContainerStarted","Data":"417a2ca5c82dcfa2044c178c350523821cbbb48694d518d61b996c71a1ee41e0"}
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.425124 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-89q4d" event={"ID":"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67","Type":"ContainerStarted","Data":"26c4d48595677dec3b46779cce238cfdcf84d132832ed98cca8fb7f539df8776"}
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.436018 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kjh59" podStartSLOduration=3.005531259 podStartE2EDuration="1m25.435995387s" podCreationTimestamp="2025-12-05 05:54:27 +0000 UTC" firstStartedPulling="2025-12-05 05:54:29.498946519 +0000 UTC m=+145.411081581" lastFinishedPulling="2025-12-05 05:55:51.929410647 +0000 UTC m=+227.841545709" observedRunningTime="2025-12-05 05:55:52.43323715 +0000 UTC m=+228.345372222" watchObservedRunningTime="2025-12-05 05:55:52.435995387 +0000 UTC m=+228.348130449"
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.451580 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-89q4d" podStartSLOduration=3.025097848 podStartE2EDuration="1m23.451565527s" podCreationTimestamp="2025-12-05 05:54:29 +0000 UTC" firstStartedPulling="2025-12-05 05:54:31.55617132 +0000 UTC m=+147.468306382" lastFinishedPulling="2025-12-05 05:55:51.982638999 +0000 UTC m=+227.894774061" observedRunningTime="2025-12-05 05:55:52.449860129 +0000 UTC m=+228.361995191" watchObservedRunningTime="2025-12-05 05:55:52.451565527 +0000 UTC m=+228.363700589"
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.474948 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cbhtr" podStartSLOduration=3.193093315 podStartE2EDuration="1m24.474927587s" podCreationTimestamp="2025-12-05 05:54:28 +0000 UTC" firstStartedPulling="2025-12-05 05:54:30.531257761 +0000 UTC m=+146.443392823" lastFinishedPulling="2025-12-05 05:55:51.813092033 +0000 UTC m=+227.725227095" observedRunningTime="2025-12-05 05:55:52.474426202 +0000 UTC m=+228.386561264" watchObservedRunningTime="2025-12-05 05:55:52.474927587 +0000 UTC m=+228.387062669"
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.492434 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dpnl4" podStartSLOduration=4.243317287 podStartE2EDuration="1m25.49241801s" podCreationTimestamp="2025-12-05 05:54:27 +0000 UTC" firstStartedPulling="2025-12-05 05:54:30.528730865 +0000 UTC m=+146.440865927" lastFinishedPulling="2025-12-05 05:55:51.777831588 +0000 UTC m=+227.689966650" observedRunningTime="2025-12-05 05:55:52.490404003 +0000 UTC m=+228.402539065" watchObservedRunningTime="2025-12-05 05:55:52.49241801 +0000 UTC m=+228.404553072"
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.512893 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-99jn8" podStartSLOduration=3.202516941 podStartE2EDuration="1m24.512874618s" podCreationTimestamp="2025-12-05 05:54:28 +0000 UTC" firstStartedPulling="2025-12-05 05:54:30.538215598 +0000 UTC m=+146.450350660" lastFinishedPulling="2025-12-05 05:55:51.848573235 +0000 UTC m=+227.760708337" observedRunningTime="2025-12-05 05:55:52.51154012 +0000 UTC m=+228.423675192" watchObservedRunningTime="2025-12-05 05:55:52.512874618 +0000 UTC m=+228.425009680"
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.531763 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pwkx9" podStartSLOduration=2.342970222 podStartE2EDuration="1m22.531745011s" podCreationTimestamp="2025-12-05 05:54:30 +0000 UTC" firstStartedPulling="2025-12-05 05:54:31.561338446 +0000 UTC m=+147.473473508" lastFinishedPulling="2025-12-05 05:55:51.750113235 +0000 UTC m=+227.662248297" observedRunningTime="2025-12-05 05:55:52.530381382 +0000 UTC m=+228.442516444" watchObservedRunningTime="2025-12-05 05:55:52.531745011 +0000 UTC m=+228.443880073"
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.578046 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gwx44" podStartSLOduration=3.345967867 podStartE2EDuration="1m22.578031437s" podCreationTimestamp="2025-12-05 05:54:30 +0000 UTC" firstStartedPulling="2025-12-05 05:54:32.695234797 +0000 UTC m=+148.607369859" lastFinishedPulling="2025-12-05 05:55:51.927298367 +0000 UTC m=+227.839433429" observedRunningTime="2025-12-05 05:55:52.558102975 +0000 UTC m=+228.470238037" watchObservedRunningTime="2025-12-05 05:55:52.578031437 +0000 UTC m=+228.490166499"
Dec 05 05:55:52 crc kubenswrapper[4742]: I1205 05:55:52.578433 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7kmqp" podStartSLOduration=3.48860256 podStartE2EDuration="1m21.578428698s" podCreationTimestamp="2025-12-05 05:54:31 +0000 UTC" firstStartedPulling="2025-12-05 05:54:33.749205667 +0000 UTC m=+149.661340719" lastFinishedPulling="2025-12-05 05:55:51.839031785 +0000 UTC m=+227.751166857" observedRunningTime="2025-12-05 05:55:52.577658867 +0000 UTC m=+228.489793929" watchObservedRunningTime="2025-12-05 05:55:52.578428698 +0000 UTC m=+228.490563760"
Dec 05 05:55:58 crc kubenswrapper[4742]: I1205 05:55:58.104586 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kjh59"
Dec 05 05:55:58 crc kubenswrapper[4742]: I1205 05:55:58.106305 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kjh59"
Dec 05 05:55:58 crc kubenswrapper[4742]: I1205 05:55:58.286857 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dpnl4"
Dec 05 05:55:58 crc kubenswrapper[4742]: I1205 05:55:58.286908 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dpnl4"
Dec 05 05:55:58 crc kubenswrapper[4742]: I1205 05:55:58.512634 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-99jn8"
Dec 05 05:55:58 crc kubenswrapper[4742]: I1205 05:55:58.512705 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-99jn8"
Dec 05 05:55:58 crc kubenswrapper[4742]: I1205 05:55:58.581219 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dpnl4"
Dec 05 05:55:58 crc kubenswrapper[4742]: I1205 05:55:58.582873 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-99jn8"
Dec 05 05:55:58 crc kubenswrapper[4742]: I1205 05:55:58.585462 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kjh59"
Dec 05 05:55:58 crc kubenswrapper[4742]: I1205 05:55:58.623019 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kjh59"
Dec 05 05:55:58 crc kubenswrapper[4742]: I1205 05:55:58.652536 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dpnl4"
Dec 05 05:55:58 crc kubenswrapper[4742]: I1205 05:55:58.743442 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cbhtr"
Dec 05 05:55:58 crc kubenswrapper[4742]: I1205 05:55:58.743489 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cbhtr"
Dec 05 05:55:58 crc kubenswrapper[4742]: I1205 05:55:58.784113 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cbhtr"
Dec 05 05:55:59 crc kubenswrapper[4742]: I1205 05:55:59.528045 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-99jn8"
Dec 05 05:55:59 crc kubenswrapper[4742]: I1205 05:55:59.534151 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cbhtr"
Dec 05 05:56:00 crc kubenswrapper[4742]: I1205 05:56:00.304011 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-89q4d"
Dec 05 05:56:00 crc kubenswrapper[4742]: I1205 05:56:00.304074 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-89q4d"
Dec 05 05:56:00 crc kubenswrapper[4742]: I1205 05:56:00.390902 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-89q4d"
Dec 05 05:56:00 crc kubenswrapper[4742]: I1205 05:56:00.401320 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cbhtr"]
Dec 05 05:56:00 crc kubenswrapper[4742]: I1205 05:56:00.511609 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-89q4d"
Dec 05 05:56:00 crc kubenswrapper[4742]: I1205 05:56:00.690768 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pwkx9"
Dec 05 05:56:00 crc kubenswrapper[4742]: I1205 05:56:00.691022 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pwkx9"
Dec 05 05:56:00 crc kubenswrapper[4742]: I1205 05:56:00.742266 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pwkx9"
Dec 05 05:56:01 crc kubenswrapper[4742]: I1205 05:56:01.005744 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-99jn8"]
Dec 05 05:56:01 crc kubenswrapper[4742]: I1205 05:56:01.284433 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gwx44"
Dec 05 05:56:01 crc kubenswrapper[4742]: I1205 05:56:01.284487 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gwx44"
Dec 05 05:56:01 crc
kubenswrapper[4742]: I1205 05:56:01.345353 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:56:01 crc kubenswrapper[4742]: I1205 05:56:01.473615 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cbhtr" podUID="7fc280e3-5842-4844-ad57-d3526ceeb957" containerName="registry-server" containerID="cri-o://ffa77e5cdd938c92d9b6cb9398a478e149da7fb307e70f21fc01fdb9b33adc2e" gracePeriod=2 Dec 05 05:56:01 crc kubenswrapper[4742]: I1205 05:56:01.474219 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-99jn8" podUID="e64b157e-a58d-4dfd-8d97-be73077a1e25" containerName="registry-server" containerID="cri-o://417a2ca5c82dcfa2044c178c350523821cbbb48694d518d61b996c71a1ee41e0" gracePeriod=2 Dec 05 05:56:01 crc kubenswrapper[4742]: I1205 05:56:01.560327 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:56:01 crc kubenswrapper[4742]: I1205 05:56:01.563200 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pwkx9" Dec 05 05:56:01 crc kubenswrapper[4742]: I1205 05:56:01.710798 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:56:01 crc kubenswrapper[4742]: I1205 05:56:01.710852 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:56:01 crc kubenswrapper[4742]: I1205 05:56:01.765855 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:56:02 crc kubenswrapper[4742]: I1205 05:56:02.513128 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:56:03 crc kubenswrapper[4742]: I1205 05:56:03.402892 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwkx9"] Dec 05 05:56:03 crc kubenswrapper[4742]: I1205 05:56:03.960460 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-45686"] Dec 05 05:56:04 crc kubenswrapper[4742]: I1205 05:56:04.493605 4742 generic.go:334] "Generic (PLEG): container finished" podID="7fc280e3-5842-4844-ad57-d3526ceeb957" containerID="ffa77e5cdd938c92d9b6cb9398a478e149da7fb307e70f21fc01fdb9b33adc2e" exitCode=0 Dec 05 05:56:04 crc kubenswrapper[4742]: I1205 05:56:04.493697 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cbhtr" event={"ID":"7fc280e3-5842-4844-ad57-d3526ceeb957","Type":"ContainerDied","Data":"ffa77e5cdd938c92d9b6cb9398a478e149da7fb307e70f21fc01fdb9b33adc2e"} Dec 05 05:56:04 crc kubenswrapper[4742]: I1205 05:56:04.497755 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-99jn8" event={"ID":"e64b157e-a58d-4dfd-8d97-be73077a1e25","Type":"ContainerDied","Data":"417a2ca5c82dcfa2044c178c350523821cbbb48694d518d61b996c71a1ee41e0"} Dec 05 05:56:04 crc kubenswrapper[4742]: I1205 05:56:04.497698 4742 generic.go:334] "Generic (PLEG): container finished" podID="e64b157e-a58d-4dfd-8d97-be73077a1e25" containerID="417a2ca5c82dcfa2044c178c350523821cbbb48694d518d61b996c71a1ee41e0" exitCode=0 Dec 05 
05:56:04 crc kubenswrapper[4742]: I1205 05:56:04.498304 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pwkx9" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" containerName="registry-server" containerID="cri-o://e3d9047fddaabf87292f2c6b8d57fffa7f04e6808c114553c0d6d1841d8a673c" gracePeriod=2 Dec 05 05:56:05 crc kubenswrapper[4742]: I1205 05:56:05.801796 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7kmqp"] Dec 05 05:56:05 crc kubenswrapper[4742]: I1205 05:56:05.802070 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7kmqp" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" containerName="registry-server" containerID="cri-o://813204be18af991f7e5b5ee544c09563a2e889df89d6c7357adbbbaf4e126818" gracePeriod=2 Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.361288 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-99jn8" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.503645 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e64b157e-a58d-4dfd-8d97-be73077a1e25-catalog-content\") pod \"e64b157e-a58d-4dfd-8d97-be73077a1e25\" (UID: \"e64b157e-a58d-4dfd-8d97-be73077a1e25\") " Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.503720 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzhsr\" (UniqueName: \"kubernetes.io/projected/e64b157e-a58d-4dfd-8d97-be73077a1e25-kube-api-access-rzhsr\") pod \"e64b157e-a58d-4dfd-8d97-be73077a1e25\" (UID: \"e64b157e-a58d-4dfd-8d97-be73077a1e25\") " Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.503776 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e64b157e-a58d-4dfd-8d97-be73077a1e25-utilities\") pod \"e64b157e-a58d-4dfd-8d97-be73077a1e25\" (UID: \"e64b157e-a58d-4dfd-8d97-be73077a1e25\") " Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.504680 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e64b157e-a58d-4dfd-8d97-be73077a1e25-utilities" (OuterVolumeSpecName: "utilities") pod "e64b157e-a58d-4dfd-8d97-be73077a1e25" (UID: "e64b157e-a58d-4dfd-8d97-be73077a1e25"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.510163 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e64b157e-a58d-4dfd-8d97-be73077a1e25-kube-api-access-rzhsr" (OuterVolumeSpecName: "kube-api-access-rzhsr") pod "e64b157e-a58d-4dfd-8d97-be73077a1e25" (UID: "e64b157e-a58d-4dfd-8d97-be73077a1e25"). InnerVolumeSpecName "kube-api-access-rzhsr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.515402 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-99jn8" event={"ID":"e64b157e-a58d-4dfd-8d97-be73077a1e25","Type":"ContainerDied","Data":"4c26f167dd43ede8da5696552047e877a09dd46852dbe95ac4fd754300b8e556"} Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.515456 4742 scope.go:117] "RemoveContainer" containerID="417a2ca5c82dcfa2044c178c350523821cbbb48694d518d61b996c71a1ee41e0" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.515485 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-99jn8" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.549670 4742 scope.go:117] "RemoveContainer" containerID="c4877f8a5f108a4a43d0be948e1f3b063797add949662733880b8af6945842d2" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.575399 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e64b157e-a58d-4dfd-8d97-be73077a1e25-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e64b157e-a58d-4dfd-8d97-be73077a1e25" (UID: "e64b157e-a58d-4dfd-8d97-be73077a1e25"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.604833 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e64b157e-a58d-4dfd-8d97-be73077a1e25-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.604863 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzhsr\" (UniqueName: \"kubernetes.io/projected/e64b157e-a58d-4dfd-8d97-be73077a1e25-kube-api-access-rzhsr\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.604874 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e64b157e-a58d-4dfd-8d97-be73077a1e25-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.626109 4742 scope.go:117] "RemoveContainer" containerID="06c454304dd31229143cdc44197155bb019079a41933852da592819711645824" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.634351 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cbhtr" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.808202 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmlms\" (UniqueName: \"kubernetes.io/projected/7fc280e3-5842-4844-ad57-d3526ceeb957-kube-api-access-cmlms\") pod \"7fc280e3-5842-4844-ad57-d3526ceeb957\" (UID: \"7fc280e3-5842-4844-ad57-d3526ceeb957\") " Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.808274 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fc280e3-5842-4844-ad57-d3526ceeb957-catalog-content\") pod \"7fc280e3-5842-4844-ad57-d3526ceeb957\" (UID: \"7fc280e3-5842-4844-ad57-d3526ceeb957\") " Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.808323 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fc280e3-5842-4844-ad57-d3526ceeb957-utilities\") pod \"7fc280e3-5842-4844-ad57-d3526ceeb957\" (UID: \"7fc280e3-5842-4844-ad57-d3526ceeb957\") " Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.810194 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fc280e3-5842-4844-ad57-d3526ceeb957-utilities" (OuterVolumeSpecName: "utilities") pod "7fc280e3-5842-4844-ad57-d3526ceeb957" (UID: "7fc280e3-5842-4844-ad57-d3526ceeb957"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.811791 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fc280e3-5842-4844-ad57-d3526ceeb957-kube-api-access-cmlms" (OuterVolumeSpecName: "kube-api-access-cmlms") pod "7fc280e3-5842-4844-ad57-d3526ceeb957" (UID: "7fc280e3-5842-4844-ad57-d3526ceeb957"). InnerVolumeSpecName "kube-api-access-cmlms". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.860425 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-99jn8"] Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.863502 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-99jn8"] Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.879853 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fc280e3-5842-4844-ad57-d3526ceeb957-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7fc280e3-5842-4844-ad57-d3526ceeb957" (UID: "7fc280e3-5842-4844-ad57-d3526ceeb957"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.909523 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmlms\" (UniqueName: \"kubernetes.io/projected/7fc280e3-5842-4844-ad57-d3526ceeb957-kube-api-access-cmlms\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.909566 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fc280e3-5842-4844-ad57-d3526ceeb957-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:06 crc kubenswrapper[4742]: I1205 05:56:06.909578 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fc280e3-5842-4844-ad57-d3526ceeb957-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:07 crc kubenswrapper[4742]: I1205 05:56:07.521740 4742 generic.go:334] "Generic (PLEG): container finished" podID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" containerID="813204be18af991f7e5b5ee544c09563a2e889df89d6c7357adbbbaf4e126818" exitCode=0 Dec 05 05:56:07 crc kubenswrapper[4742]: I1205 05:56:07.522010 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7kmqp" event={"ID":"43ebbfe9-74ee-405b-82d8-d4a825a7386d","Type":"ContainerDied","Data":"813204be18af991f7e5b5ee544c09563a2e889df89d6c7357adbbbaf4e126818"} Dec 05 05:56:07 crc kubenswrapper[4742]: I1205 05:56:07.523695 4742 generic.go:334] "Generic (PLEG): container finished" podID="c433daac-2067-47ed-ba5c-01ae452a511d" containerID="e3d9047fddaabf87292f2c6b8d57fffa7f04e6808c114553c0d6d1841d8a673c" exitCode=0 Dec 05 05:56:07 crc kubenswrapper[4742]: I1205 05:56:07.523742 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwkx9" event={"ID":"c433daac-2067-47ed-ba5c-01ae452a511d","Type":"ContainerDied","Data":"e3d9047fddaabf87292f2c6b8d57fffa7f04e6808c114553c0d6d1841d8a673c"} Dec 05 05:56:07 crc kubenswrapper[4742]: I1205 05:56:07.525955 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cbhtr" event={"ID":"7fc280e3-5842-4844-ad57-d3526ceeb957","Type":"ContainerDied","Data":"74466f1d3cb5d0dca4c6a3247ac6fdf68994e86088b5136940caa485b4343826"} Dec 05 05:56:07 crc kubenswrapper[4742]: I1205 05:56:07.525986 4742 scope.go:117] "RemoveContainer" containerID="ffa77e5cdd938c92d9b6cb9398a478e149da7fb307e70f21fc01fdb9b33adc2e" Dec 05 05:56:07 crc kubenswrapper[4742]: I1205 05:56:07.526113 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cbhtr" Dec 05 05:56:07 crc kubenswrapper[4742]: I1205 05:56:07.551846 4742 scope.go:117] "RemoveContainer" containerID="a9885554ef9cece59101a208efc3c512f303e47827477b1f1f8a3e3a6c1d43d1" Dec 05 05:56:07 crc kubenswrapper[4742]: I1205 05:56:07.552628 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cbhtr"] Dec 05 05:56:07 crc kubenswrapper[4742]: I1205 05:56:07.559317 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cbhtr"] Dec 05 05:56:07 crc kubenswrapper[4742]: I1205 05:56:07.573974 4742 scope.go:117] "RemoveContainer" containerID="fc1bad23496f588771e7d0a2613908887d50daf49cf7ccb0ab803a7b628f7fa9" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.389610 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fc280e3-5842-4844-ad57-d3526ceeb957" path="/var/lib/kubelet/pods/7fc280e3-5842-4844-ad57-d3526ceeb957/volumes" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.390202 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e64b157e-a58d-4dfd-8d97-be73077a1e25" path="/var/lib/kubelet/pods/e64b157e-a58d-4dfd-8d97-be73077a1e25/volumes" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.626573 4742 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.626850 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fc280e3-5842-4844-ad57-d3526ceeb957" containerName="extract-utilities" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.626864 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fc280e3-5842-4844-ad57-d3526ceeb957" containerName="extract-utilities" Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.626888 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e64b157e-a58d-4dfd-8d97-be73077a1e25" containerName="extract-utilities" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.626898 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e64b157e-a58d-4dfd-8d97-be73077a1e25" containerName="extract-utilities" Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.626913 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e64b157e-a58d-4dfd-8d97-be73077a1e25" containerName="extract-content" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.626922 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e64b157e-a58d-4dfd-8d97-be73077a1e25" containerName="extract-content" Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.626931 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e64b157e-a58d-4dfd-8d97-be73077a1e25" containerName="registry-server" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.626939 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e64b157e-a58d-4dfd-8d97-be73077a1e25" containerName="registry-server" Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.626954 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="711aa3ef-6d85-4813-8182-facf9c865c4e" containerName="pruner" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.626963 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="711aa3ef-6d85-4813-8182-facf9c865c4e" containerName="pruner" Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.626975 4742 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="7fc280e3-5842-4844-ad57-d3526ceeb957" containerName="extract-content" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.626984 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fc280e3-5842-4844-ad57-d3526ceeb957" containerName="extract-content" Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.626998 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fc280e3-5842-4844-ad57-d3526ceeb957" containerName="registry-server" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.627006 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fc280e3-5842-4844-ad57-d3526ceeb957" containerName="registry-server" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.627156 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="711aa3ef-6d85-4813-8182-facf9c865c4e" containerName="pruner" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.627169 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fc280e3-5842-4844-ad57-d3526ceeb957" containerName="registry-server" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.627187 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e64b157e-a58d-4dfd-8d97-be73077a1e25" containerName="registry-server" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.627539 4742 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.627712 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.627847 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3" gracePeriod=15 Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.627955 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61" gracePeriod=15 Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.627983 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6" gracePeriod=15 Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.628035 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d" gracePeriod=15 Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.628046 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b" gracePeriod=15 Dec 
05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.628498 4742 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.628728 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.628748 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.628764 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.628773 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.628782 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.628790 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.628802 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.628809 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.628822 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.628829 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.628837 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.628843 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.628851 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.628857 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.628985 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.629000 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.629012 4742 
memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.629021 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.629031 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.629040 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.645236 4742 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:35020->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.645298 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:35020->192.168.126.11:17697: read: connection reset by peer" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.671429 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.731914 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.732188 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.732228 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.732256 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.732310 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.732335 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.732426 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.732450 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835097 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835424 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835451 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835468 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835511 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835549 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835576 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835597 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835658 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835693 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835713 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835733 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835752 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835771 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835789 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" 
(UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.835808 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.881716 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pwkx9" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.882428 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.882599 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.882745 4742 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:08 crc kubenswrapper[4742]: I1205 05:56:08.966851 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:56:08 crc kubenswrapper[4742]: W1205 05:56:08.987363 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-34248cfa0b904ba552475bc66e6888e698ff69248b02470492c42fdcdc7ab852 WatchSource:0}: Error finding container 34248cfa0b904ba552475bc66e6888e698ff69248b02470492c42fdcdc7ab852: Status 404 returned error can't find the container with id 34248cfa0b904ba552475bc66e6888e698ff69248b02470492c42fdcdc7ab852 Dec 05 05:56:08 crc kubenswrapper[4742]: E1205 05:56:08.990371 4742 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.233:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e3c15f5b065c9 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 05:56:08.989771209 +0000 UTC m=+244.901906291,LastTimestamp:2025-12-05 05:56:08.989771209 +0000 UTC m=+244.901906291,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.037941 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7l2w\" (UniqueName: \"kubernetes.io/projected/c433daac-2067-47ed-ba5c-01ae452a511d-kube-api-access-r7l2w\") pod \"c433daac-2067-47ed-ba5c-01ae452a511d\" (UID: \"c433daac-2067-47ed-ba5c-01ae452a511d\") " Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.037995 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c433daac-2067-47ed-ba5c-01ae452a511d-catalog-content\") pod \"c433daac-2067-47ed-ba5c-01ae452a511d\" (UID: \"c433daac-2067-47ed-ba5c-01ae452a511d\") " Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.038051 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c433daac-2067-47ed-ba5c-01ae452a511d-utilities\") pod \"c433daac-2067-47ed-ba5c-01ae452a511d\" (UID: \"c433daac-2067-47ed-ba5c-01ae452a511d\") " Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.039173 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c433daac-2067-47ed-ba5c-01ae452a511d-utilities" (OuterVolumeSpecName: "utilities") pod "c433daac-2067-47ed-ba5c-01ae452a511d" (UID: "c433daac-2067-47ed-ba5c-01ae452a511d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.041031 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c433daac-2067-47ed-ba5c-01ae452a511d-kube-api-access-r7l2w" (OuterVolumeSpecName: "kube-api-access-r7l2w") pod "c433daac-2067-47ed-ba5c-01ae452a511d" (UID: "c433daac-2067-47ed-ba5c-01ae452a511d"). InnerVolumeSpecName "kube-api-access-r7l2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.065015 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c433daac-2067-47ed-ba5c-01ae452a511d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c433daac-2067-47ed-ba5c-01ae452a511d" (UID: "c433daac-2067-47ed-ba5c-01ae452a511d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.139618 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c433daac-2067-47ed-ba5c-01ae452a511d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.139652 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c433daac-2067-47ed-ba5c-01ae452a511d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.139666 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7l2w\" (UniqueName: \"kubernetes.io/projected/c433daac-2067-47ed-ba5c-01ae452a511d-kube-api-access-r7l2w\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.544860 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.546425 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.547281 4742 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b" exitCode=2 Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.549457 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwkx9" event={"ID":"c433daac-2067-47ed-ba5c-01ae452a511d","Type":"ContainerDied","Data":"215d4043a88775affa807a5708b5c88eb94219f142a867839e77c8ca58f751b6"} Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.549494 4742 scope.go:117] "RemoveContainer" containerID="e3d9047fddaabf87292f2c6b8d57fffa7f04e6808c114553c0d6d1841d8a673c" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.549497 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pwkx9" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.550209 4742 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.550562 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.550908 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.550941 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"34248cfa0b904ba552475bc66e6888e698ff69248b02470492c42fdcdc7ab852"} Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.566263 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.566802 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.567111 4742 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.569724 4742 scope.go:117] "RemoveContainer" containerID="0a65ea4819366ac4e5eec43564d17484eae47982cb5e36188d43e6a5e12f7105" Dec 05 05:56:09 crc kubenswrapper[4742]: I1205 05:56:09.603760 4742 scope.go:117] "RemoveContainer" containerID="a6b4433a8e42085703ea5d8e799db7cbe83bd13b040ee9772c71f338bf3b8e15" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.287840 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.288671 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.289099 4742 status_manager.go:851] "Failed to get status for pod" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" pod="openshift-marketplace/redhat-operators-7kmqp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-7kmqp\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.289335 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.289675 4742 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.454524 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvdrn\" (UniqueName: \"kubernetes.io/projected/43ebbfe9-74ee-405b-82d8-d4a825a7386d-kube-api-access-hvdrn\") pod \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\" (UID: \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\") " Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.454613 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43ebbfe9-74ee-405b-82d8-d4a825a7386d-catalog-content\") pod \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\" (UID: \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\") " Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.454718 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43ebbfe9-74ee-405b-82d8-d4a825a7386d-utilities\") pod \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\" (UID: \"43ebbfe9-74ee-405b-82d8-d4a825a7386d\") " Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.455594 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43ebbfe9-74ee-405b-82d8-d4a825a7386d-utilities" (OuterVolumeSpecName: "utilities") pod "43ebbfe9-74ee-405b-82d8-d4a825a7386d" (UID: "43ebbfe9-74ee-405b-82d8-d4a825a7386d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.459498 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43ebbfe9-74ee-405b-82d8-d4a825a7386d-kube-api-access-hvdrn" (OuterVolumeSpecName: "kube-api-access-hvdrn") pod "43ebbfe9-74ee-405b-82d8-d4a825a7386d" (UID: "43ebbfe9-74ee-405b-82d8-d4a825a7386d"). 
InnerVolumeSpecName "kube-api-access-hvdrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.556584 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43ebbfe9-74ee-405b-82d8-d4a825a7386d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.556612 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvdrn\" (UniqueName: \"kubernetes.io/projected/43ebbfe9-74ee-405b-82d8-d4a825a7386d-kube-api-access-hvdrn\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.558204 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7kmqp" event={"ID":"43ebbfe9-74ee-405b-82d8-d4a825a7386d","Type":"ContainerDied","Data":"2c230cc833d5787dbcae913aa9baf231c2b77833062e8f5e2afaad199b9b3080"} Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.558251 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7kmqp" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.558269 4742 scope.go:117] "RemoveContainer" containerID="813204be18af991f7e5b5ee544c09563a2e889df89d6c7357adbbbaf4e126818" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.558928 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.559335 4742 status_manager.go:851] "Failed to get status for pod" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" pod="openshift-marketplace/redhat-operators-7kmqp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-7kmqp\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.559665 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.574793 4742 scope.go:117] "RemoveContainer" containerID="72b28a96e1b25346d757d10571d90d724b972cec2a76a5d52f9c1c0b268208a3" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.591154 4742 scope.go:117] "RemoveContainer" containerID="c97eb8a6bf8096eace358f5c7d59e5683de6a6ef775d62fc11be7f873924e3a9" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.925547 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43ebbfe9-74ee-405b-82d8-d4a825a7386d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "43ebbfe9-74ee-405b-82d8-d4a825a7386d" (UID: "43ebbfe9-74ee-405b-82d8-d4a825a7386d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:56:10 crc kubenswrapper[4742]: I1205 05:56:10.962873 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43ebbfe9-74ee-405b-82d8-d4a825a7386d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.176698 4742 status_manager.go:851] "Failed to get status for pod" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" pod="openshift-marketplace/redhat-operators-7kmqp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-7kmqp\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.177154 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.177674 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: E1205 05:56:11.216841 4742 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.233:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e3c15f5b065c9 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 05:56:08.989771209 +0000 UTC m=+244.901906291,LastTimestamp:2025-12-05 05:56:08.989771209 +0000 UTC m=+244.901906291,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.574603 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"3c67e59b790f6e0c7fbf1efc906a10b3237cd9e88d26eaa41efdf19f51b5772f"} Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.576215 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.576672 4742 
status_manager.go:851] "Failed to get status for pod" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" pod="openshift-marketplace/redhat-operators-7kmqp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-7kmqp\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.577318 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.578370 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.580068 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.580802 4742 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d" exitCode=0 Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.580851 4742 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61" exitCode=0 Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.580872 4742 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6" exitCode=0 Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.580887 4742 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3" exitCode=0 Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.580983 4742 scope.go:117] "RemoveContainer" containerID="62f0f64da9eb73b19504d1eef0282bd47a98c4f6303a001b01282988995ee765" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.595557 4742 generic.go:334] "Generic (PLEG): container finished" podID="7bb19c95-d3a5-4418-8787-82e424244073" containerID="7de69cb7c452679d77d80eb05947cc6ce3fdc1fbe650c1f12bce8172f71f53d5" exitCode=0 Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.595608 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"7bb19c95-d3a5-4418-8787-82e424244073","Type":"ContainerDied","Data":"7de69cb7c452679d77d80eb05947cc6ce3fdc1fbe650c1f12bce8172f71f53d5"} Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.596290 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.596582 4742 status_manager.go:851] "Failed to get status for pod" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" 
pod="openshift-marketplace/redhat-operators-7kmqp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-7kmqp\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.596825 4742 status_manager.go:851] "Failed to get status for pod" podUID="7bb19c95-d3a5-4418-8787-82e424244073" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.597053 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.664796 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.665742 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.666353 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.666693 4742 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.666949 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.667462 4742 status_manager.go:851] "Failed to get status for pod" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" pod="openshift-marketplace/redhat-operators-7kmqp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-7kmqp\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.667850 4742 status_manager.go:851] "Failed to get status for pod" podUID="7bb19c95-d3a5-4418-8787-82e424244073" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.773837 4742 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.773933 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.774246 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.774300 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.774393 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.774425 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.774708 4742 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.774777 4742 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:11 crc kubenswrapper[4742]: I1205 05:56:11.774840 4742 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.395982 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.603951 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.604720 4742 scope.go:117] "RemoveContainer" containerID="40bfe54a15ed6509a5af55b77f7ea9b5e758fb593b008eb0cbbc5816bb9bbc2d" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.605111 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.607252 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.610341 4742 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.610789 4742 status_manager.go:851] "Failed to get status for pod" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" pod="openshift-marketplace/redhat-operators-7kmqp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-7kmqp\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.610961 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.611149 4742 status_manager.go:851] "Failed to get status for pod" podUID="7bb19c95-d3a5-4418-8787-82e424244073" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.611794 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.612048 4742 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.612394 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.612648 4742 status_manager.go:851] "Failed to get status for pod" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" pod="openshift-marketplace/redhat-operators-7kmqp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-7kmqp\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.612895 4742 status_manager.go:851] "Failed to get status for pod" podUID="7bb19c95-d3a5-4418-8787-82e424244073" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.621377 4742 scope.go:117] "RemoveContainer" containerID="4f7e675ac23b5c1444c5eb8cfecfea75da5a96c3b96cd89a2d4538ad2268cb61" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.635277 4742 scope.go:117] "RemoveContainer" containerID="e235be700a68cc157ed8a5af8550d33670d332b26bb74d6da4fbad5b2d4056b6" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.679294 4742 scope.go:117] "RemoveContainer" containerID="fb7cdc1bd531f7c57ff73414d838f38ef96482a9cca3af0c0be33b649cd51f5b" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.694321 4742 scope.go:117] "RemoveContainer" containerID="cf5add1e38221e414a153f023543a1fb31dc8d9d294cb6a96c927f76a06ce8a3" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.725024 4742 scope.go:117] "RemoveContainer" containerID="29eb4a3da79d54f35e4d5a347231516d725de2a13eadcd34fe17d033b72b1f1d" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.858326 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.858899 4742 status_manager.go:851] "Failed to get status for pod" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" pod="openshift-marketplace/redhat-operators-7kmqp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-7kmqp\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.859464 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.859948 4742 status_manager.go:851] "Failed to get status for pod" podUID="7bb19c95-d3a5-4418-8787-82e424244073" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.860472 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.860888 4742 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.990754 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7bb19c95-d3a5-4418-8787-82e424244073-kubelet-dir\") pod \"7bb19c95-d3a5-4418-8787-82e424244073\" (UID: \"7bb19c95-d3a5-4418-8787-82e424244073\") " Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.990845 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7bb19c95-d3a5-4418-8787-82e424244073-kube-api-access\") pod \"7bb19c95-d3a5-4418-8787-82e424244073\" (UID: \"7bb19c95-d3a5-4418-8787-82e424244073\") " Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.990899 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7bb19c95-d3a5-4418-8787-82e424244073-var-lock\") pod \"7bb19c95-d3a5-4418-8787-82e424244073\" (UID: \"7bb19c95-d3a5-4418-8787-82e424244073\") " Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.990926 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7bb19c95-d3a5-4418-8787-82e424244073-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "7bb19c95-d3a5-4418-8787-82e424244073" (UID: "7bb19c95-d3a5-4418-8787-82e424244073"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.991230 4742 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7bb19c95-d3a5-4418-8787-82e424244073-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.991276 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7bb19c95-d3a5-4418-8787-82e424244073-var-lock" (OuterVolumeSpecName: "var-lock") pod "7bb19c95-d3a5-4418-8787-82e424244073" (UID: "7bb19c95-d3a5-4418-8787-82e424244073"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:56:12 crc kubenswrapper[4742]: I1205 05:56:12.995899 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb19c95-d3a5-4418-8787-82e424244073-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "7bb19c95-d3a5-4418-8787-82e424244073" (UID: "7bb19c95-d3a5-4418-8787-82e424244073"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:56:13 crc kubenswrapper[4742]: I1205 05:56:13.092430 4742 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/7bb19c95-d3a5-4418-8787-82e424244073-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:13 crc kubenswrapper[4742]: I1205 05:56:13.092472 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7bb19c95-d3a5-4418-8787-82e424244073-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:13 crc kubenswrapper[4742]: I1205 05:56:13.615940 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"7bb19c95-d3a5-4418-8787-82e424244073","Type":"ContainerDied","Data":"86e001fcbfc81c7559e1b2b18d5a51572d3256c2d32092d8506114262aeb605a"} Dec 05 05:56:13 crc kubenswrapper[4742]: I1205 05:56:13.615988 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86e001fcbfc81c7559e1b2b18d5a51572d3256c2d32092d8506114262aeb605a" Dec 05 05:56:13 crc kubenswrapper[4742]: I1205 05:56:13.616103 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 05:56:13 crc kubenswrapper[4742]: I1205 05:56:13.645667 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:13 crc kubenswrapper[4742]: I1205 05:56:13.646424 4742 status_manager.go:851] "Failed to get status for pod" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" pod="openshift-marketplace/redhat-operators-7kmqp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-7kmqp\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:13 crc kubenswrapper[4742]: I1205 05:56:13.647079 4742 status_manager.go:851] "Failed to get status for pod" podUID="7bb19c95-d3a5-4418-8787-82e424244073" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:13 crc kubenswrapper[4742]: I1205 05:56:13.647331 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:13 crc kubenswrapper[4742]: I1205 05:56:13.647982 4742 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:14 crc kubenswrapper[4742]: I1205 05:56:14.386111 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:14 crc kubenswrapper[4742]: I1205 05:56:14.386863 4742 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:14 crc kubenswrapper[4742]: I1205 05:56:14.387140 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:14 crc kubenswrapper[4742]: I1205 05:56:14.387572 4742 status_manager.go:851] "Failed to get status for pod" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" pod="openshift-marketplace/redhat-operators-7kmqp" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-7kmqp\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:14 crc kubenswrapper[4742]: I1205 05:56:14.387960 4742 status_manager.go:851] "Failed to get status for pod" podUID="7bb19c95-d3a5-4418-8787-82e424244073" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:17 crc kubenswrapper[4742]: E1205 05:56:17.884986 4742 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:17 crc kubenswrapper[4742]: E1205 05:56:17.886231 4742 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:17 crc kubenswrapper[4742]: E1205 05:56:17.886734 4742 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:17 crc kubenswrapper[4742]: E1205 05:56:17.887221 4742 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:17 crc kubenswrapper[4742]: E1205 05:56:17.887944 4742 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:17 crc kubenswrapper[4742]: I1205 05:56:17.887996 4742 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 05 05:56:17 crc kubenswrapper[4742]: E1205 05:56:17.888471 4742 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="200ms" Dec 05 05:56:18 crc kubenswrapper[4742]: E1205 05:56:18.089222 4742 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="400ms" Dec 05 05:56:18 crc kubenswrapper[4742]: E1205 05:56:18.490748 4742 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="800ms" Dec 05 05:56:19 crc kubenswrapper[4742]: E1205 05:56:19.292383 4742 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="1.6s" Dec 05 
05:56:20 crc kubenswrapper[4742]: E1205 05:56:20.897752 4742 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="3.2s" Dec 05 05:56:21 crc kubenswrapper[4742]: E1205 05:56:21.218504 4742 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.233:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e3c15f5b065c9 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 05:56:08.989771209 +0000 UTC m=+244.901906291,LastTimestamp:2025-12-05 05:56:08.989771209 +0000 UTC m=+244.901906291,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 05:56:22 crc kubenswrapper[4742]: I1205 05:56:22.382865 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:22 crc kubenswrapper[4742]: I1205 05:56:22.386671 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:22 crc kubenswrapper[4742]: I1205 05:56:22.387205 4742 status_manager.go:851] "Failed to get status for pod" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" pod="openshift-marketplace/redhat-operators-7kmqp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-7kmqp\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:22 crc kubenswrapper[4742]: I1205 05:56:22.387582 4742 status_manager.go:851] "Failed to get status for pod" podUID="7bb19c95-d3a5-4418-8787-82e424244073" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:22 crc kubenswrapper[4742]: I1205 05:56:22.387836 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:22 crc kubenswrapper[4742]: I1205 05:56:22.411225 4742 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ea18901-cdec-4f7a-96c3-610bb6b9eef5" Dec 05 05:56:22 crc 
kubenswrapper[4742]: I1205 05:56:22.411519 4742 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ea18901-cdec-4f7a-96c3-610bb6b9eef5" Dec 05 05:56:22 crc kubenswrapper[4742]: E1205 05:56:22.412131 4742 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:22 crc kubenswrapper[4742]: I1205 05:56:22.412951 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:22 crc kubenswrapper[4742]: W1205 05:56:22.817845 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-f871a689739ac34d18285db3b97f91619c9a8488b0caf6dadfe7c6df8b952d13 WatchSource:0}: Error finding container f871a689739ac34d18285db3b97f91619c9a8488b0caf6dadfe7c6df8b952d13: Status 404 returned error can't find the container with id f871a689739ac34d18285db3b97f91619c9a8488b0caf6dadfe7c6df8b952d13 Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.682506 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.683725 4742 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb" exitCode=1 Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.684011 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb"} Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.684763 4742 scope.go:117] "RemoveContainer" containerID="f31c4ad27d9e85911869c833d4ac3b177ce26731afe0bebaa2a278aea1ed20cb" Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.685275 4742 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.685763 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.686416 4742 status_manager.go:851] "Failed to get status for pod" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" pod="openshift-marketplace/redhat-operators-7kmqp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-7kmqp\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.687012 4742 
status_manager.go:851] "Failed to get status for pod" podUID="7bb19c95-d3a5-4418-8787-82e424244073" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.687421 4742 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="da76056379bbc938b5599d3e8933e129049d0f7bc5e097d9e606dee25312274c" exitCode=0 Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.687524 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"da76056379bbc938b5599d3e8933e129049d0f7bc5e097d9e606dee25312274c"} Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.687539 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.687593 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f871a689739ac34d18285db3b97f91619c9a8488b0caf6dadfe7c6df8b952d13"} Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.688139 4742 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ea18901-cdec-4f7a-96c3-610bb6b9eef5" Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.688180 4742 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ea18901-cdec-4f7a-96c3-610bb6b9eef5" Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.688539 4742 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:23 crc kubenswrapper[4742]: E1205 05:56:23.688652 4742 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.689035 4742 status_manager.go:851] "Failed to get status for pod" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" pod="openshift-marketplace/redhat-marketplace-pwkx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-pwkx9\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.689678 4742 status_manager.go:851] "Failed to get status for pod" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" pod="openshift-marketplace/redhat-operators-7kmqp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-7kmqp\": dial tcp 
38.102.83.233:6443: connect: connection refused" Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.690134 4742 status_manager.go:851] "Failed to get status for pod" podUID="7bb19c95-d3a5-4418-8787-82e424244073" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:23 crc kubenswrapper[4742]: I1205 05:56:23.690617 4742 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 05:56:24 crc kubenswrapper[4742]: E1205 05:56:24.098710 4742 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="6.4s" Dec 05 05:56:24 crc kubenswrapper[4742]: I1205 05:56:24.123185 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:56:24 crc kubenswrapper[4742]: I1205 05:56:24.696209 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 05:56:24 crc kubenswrapper[4742]: I1205 05:56:24.696303 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"45d2c24352d08b53a215ebac98941dbc5eeafa40fc682c273cc18e7a3e6beffe"} Dec 05 05:56:24 crc kubenswrapper[4742]: I1205 05:56:24.699333 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"282f1dfaca2f391b5831bd62d8019951b2c0a2171fda6cd30193d6216f8c6af9"} Dec 05 05:56:24 crc kubenswrapper[4742]: I1205 05:56:24.699376 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3ea0e4a0fa5c3d473dea5197406a6ba6d511a1faf211433852c3149a5e3f2c3a"} Dec 05 05:56:24 crc kubenswrapper[4742]: I1205 05:56:24.699390 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"09658d43a9f4b003a84c7a64b906c43a838bd72b622e60dbfdd45524dfb1df1b"} Dec 05 05:56:25 crc kubenswrapper[4742]: I1205 05:56:25.708721 4742 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ea18901-cdec-4f7a-96c3-610bb6b9eef5" Dec 05 05:56:25 crc kubenswrapper[4742]: I1205 05:56:25.708930 4742 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ea18901-cdec-4f7a-96c3-610bb6b9eef5" Dec 05 05:56:25 crc kubenswrapper[4742]: I1205 05:56:25.708953 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8f8270afdffe6bcc6a8d4fdbf56f90f4348dad302379ddbb6e0b911ae809e5e3"} Dec 05 05:56:25 crc kubenswrapper[4742]: I1205 05:56:25.709017 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:25 crc kubenswrapper[4742]: I1205 05:56:25.709030 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e46e2d1f911fa1cc47849b9dbe1339440c25c8527f1b4094f1859b40c02607e0"} Dec 05 05:56:27 crc kubenswrapper[4742]: I1205 05:56:27.413763 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:27 crc kubenswrapper[4742]: I1205 05:56:27.413839 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:27 crc kubenswrapper[4742]: I1205 05:56:27.420812 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.004362 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-45686" podUID="f3d9ea9f-6af6-42ea-9298-2e970da2572e" containerName="oauth-openshift" containerID="cri-o://02d31a7fe7e634d441cbc67e03a1804c2214ff3263bbf926965bf63e20f6b0cc" gracePeriod=15 Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.620243 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.654481 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-cliconfig\") pod \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.654528 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-idp-0-file-data\") pod \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.654555 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-error\") pod \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.654583 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-trusted-ca-bundle\") pod \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.654607 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-service-ca\") pod \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.654634 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lc5fq\" (UniqueName: \"kubernetes.io/projected/f3d9ea9f-6af6-42ea-9298-2e970da2572e-kube-api-access-lc5fq\") pod \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.654656 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-login\") pod \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.654681 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-audit-policies\") pod \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.654702 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-router-certs\") pod \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.654725 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-session\") pod \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.654756 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f3d9ea9f-6af6-42ea-9298-2e970da2572e-audit-dir\") pod \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.654779 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-provider-selection\") pod \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.654807 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-ocp-branding-template\") pod \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\" (UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.654833 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-serving-cert\") pod \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\" 
(UID: \"f3d9ea9f-6af6-42ea-9298-2e970da2572e\") " Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.655466 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "f3d9ea9f-6af6-42ea-9298-2e970da2572e" (UID: "f3d9ea9f-6af6-42ea-9298-2e970da2572e"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.655522 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "f3d9ea9f-6af6-42ea-9298-2e970da2572e" (UID: "f3d9ea9f-6af6-42ea-9298-2e970da2572e"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.655610 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f3d9ea9f-6af6-42ea-9298-2e970da2572e-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f3d9ea9f-6af6-42ea-9298-2e970da2572e" (UID: "f3d9ea9f-6af6-42ea-9298-2e970da2572e"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.656189 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "f3d9ea9f-6af6-42ea-9298-2e970da2572e" (UID: "f3d9ea9f-6af6-42ea-9298-2e970da2572e"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.656265 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "f3d9ea9f-6af6-42ea-9298-2e970da2572e" (UID: "f3d9ea9f-6af6-42ea-9298-2e970da2572e"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.660206 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "f3d9ea9f-6af6-42ea-9298-2e970da2572e" (UID: "f3d9ea9f-6af6-42ea-9298-2e970da2572e"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.660529 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "f3d9ea9f-6af6-42ea-9298-2e970da2572e" (UID: "f3d9ea9f-6af6-42ea-9298-2e970da2572e"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.660761 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "f3d9ea9f-6af6-42ea-9298-2e970da2572e" (UID: "f3d9ea9f-6af6-42ea-9298-2e970da2572e"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.661147 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3d9ea9f-6af6-42ea-9298-2e970da2572e-kube-api-access-lc5fq" (OuterVolumeSpecName: "kube-api-access-lc5fq") pod "f3d9ea9f-6af6-42ea-9298-2e970da2572e" (UID: "f3d9ea9f-6af6-42ea-9298-2e970da2572e"). InnerVolumeSpecName "kube-api-access-lc5fq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.662247 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "f3d9ea9f-6af6-42ea-9298-2e970da2572e" (UID: "f3d9ea9f-6af6-42ea-9298-2e970da2572e"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.662652 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "f3d9ea9f-6af6-42ea-9298-2e970da2572e" (UID: "f3d9ea9f-6af6-42ea-9298-2e970da2572e"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.662720 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "f3d9ea9f-6af6-42ea-9298-2e970da2572e" (UID: "f3d9ea9f-6af6-42ea-9298-2e970da2572e"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.662906 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "f3d9ea9f-6af6-42ea-9298-2e970da2572e" (UID: "f3d9ea9f-6af6-42ea-9298-2e970da2572e"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.664261 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "f3d9ea9f-6af6-42ea-9298-2e970da2572e" (UID: "f3d9ea9f-6af6-42ea-9298-2e970da2572e"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.731980 4742 generic.go:334] "Generic (PLEG): container finished" podID="f3d9ea9f-6af6-42ea-9298-2e970da2572e" containerID="02d31a7fe7e634d441cbc67e03a1804c2214ff3263bbf926965bf63e20f6b0cc" exitCode=0 Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.732038 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-45686" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.732115 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-45686" event={"ID":"f3d9ea9f-6af6-42ea-9298-2e970da2572e","Type":"ContainerDied","Data":"02d31a7fe7e634d441cbc67e03a1804c2214ff3263bbf926965bf63e20f6b0cc"} Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.732298 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-45686" event={"ID":"f3d9ea9f-6af6-42ea-9298-2e970da2572e","Type":"ContainerDied","Data":"fa95dfb41e16808fd01d1ff1998f157e08e570333fbb8c56b82cee460b71975f"} Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.732338 4742 scope.go:117] "RemoveContainer" containerID="02d31a7fe7e634d441cbc67e03a1804c2214ff3263bbf926965bf63e20f6b0cc" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.753313 4742 scope.go:117] "RemoveContainer" containerID="02d31a7fe7e634d441cbc67e03a1804c2214ff3263bbf926965bf63e20f6b0cc" Dec 05 05:56:29 crc kubenswrapper[4742]: E1205 05:56:29.753822 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02d31a7fe7e634d441cbc67e03a1804c2214ff3263bbf926965bf63e20f6b0cc\": container with ID starting with 02d31a7fe7e634d441cbc67e03a1804c2214ff3263bbf926965bf63e20f6b0cc not found: ID does not exist" containerID="02d31a7fe7e634d441cbc67e03a1804c2214ff3263bbf926965bf63e20f6b0cc" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.753864 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02d31a7fe7e634d441cbc67e03a1804c2214ff3263bbf926965bf63e20f6b0cc"} err="failed to get container status \"02d31a7fe7e634d441cbc67e03a1804c2214ff3263bbf926965bf63e20f6b0cc\": rpc error: code = NotFound desc = could not find container \"02d31a7fe7e634d441cbc67e03a1804c2214ff3263bbf926965bf63e20f6b0cc\": container with ID starting with 02d31a7fe7e634d441cbc67e03a1804c2214ff3263bbf926965bf63e20f6b0cc not found: ID does not exist" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.756529 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.756573 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.756597 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:29 crc 
kubenswrapper[4742]: I1205 05:56:29.756618 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.756638 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.756656 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.756675 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.756695 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.756714 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lc5fq\" (UniqueName: \"kubernetes.io/projected/f3d9ea9f-6af6-42ea-9298-2e970da2572e-kube-api-access-lc5fq\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.756733 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.756754 4742 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f3d9ea9f-6af6-42ea-9298-2e970da2572e-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.756774 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.756793 4742 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/f3d9ea9f-6af6-42ea-9298-2e970da2572e-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.756812 4742 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f3d9ea9f-6af6-42ea-9298-2e970da2572e-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:56:29 crc kubenswrapper[4742]: I1205 05:56:29.936520 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:56:30 crc kubenswrapper[4742]: I1205 05:56:30.717614 4742 kubelet.go:1914] "Deleted mirror pod because it is outdated" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:30 crc kubenswrapper[4742]: I1205 05:56:30.742490 4742 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ea18901-cdec-4f7a-96c3-610bb6b9eef5" Dec 05 05:56:30 crc kubenswrapper[4742]: I1205 05:56:30.742524 4742 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ea18901-cdec-4f7a-96c3-610bb6b9eef5" Dec 05 05:56:30 crc kubenswrapper[4742]: I1205 05:56:30.747693 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:30 crc kubenswrapper[4742]: I1205 05:56:30.750045 4742 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c1076969-108e-4346-bed2-6ed42d293e1f" Dec 05 05:56:30 crc kubenswrapper[4742]: E1205 05:56:30.969889 4742 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-cliconfig\": Failed to watch *v1.ConfigMap: unknown (get configmaps)" logger="UnhandledError" Dec 05 05:56:31 crc kubenswrapper[4742]: I1205 05:56:31.747850 4742 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ea18901-cdec-4f7a-96c3-610bb6b9eef5" Dec 05 05:56:31 crc kubenswrapper[4742]: I1205 05:56:31.748099 4742 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ea18901-cdec-4f7a-96c3-610bb6b9eef5" Dec 05 05:56:34 crc kubenswrapper[4742]: I1205 05:56:34.122944 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:56:34 crc kubenswrapper[4742]: I1205 05:56:34.128485 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:56:34 crc kubenswrapper[4742]: I1205 05:56:34.414203 4742 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="c1076969-108e-4346-bed2-6ed42d293e1f" Dec 05 05:56:34 crc kubenswrapper[4742]: I1205 05:56:34.772404 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 05:56:40 crc kubenswrapper[4742]: I1205 05:56:40.285970 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 05:56:40 crc kubenswrapper[4742]: I1205 05:56:40.937876 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 05:56:41 crc kubenswrapper[4742]: I1205 05:56:41.106288 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 05:56:41 crc kubenswrapper[4742]: I1205 05:56:41.233729 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 05:56:41 crc kubenswrapper[4742]: I1205 05:56:41.358221 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 05:56:41 crc kubenswrapper[4742]: I1205 05:56:41.470534 4742 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 05:56:41 crc kubenswrapper[4742]: I1205 05:56:41.470773 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 05:56:41 crc kubenswrapper[4742]: I1205 05:56:41.718532 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 05:56:41 crc kubenswrapper[4742]: I1205 05:56:41.745130 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 05:56:42 crc kubenswrapper[4742]: I1205 05:56:42.144833 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 05:56:42 crc kubenswrapper[4742]: I1205 05:56:42.466763 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 05:56:42 crc kubenswrapper[4742]: I1205 05:56:42.554000 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 05:56:42 crc kubenswrapper[4742]: I1205 05:56:42.709981 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 05:56:42 crc kubenswrapper[4742]: I1205 05:56:42.737452 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 05:56:42 crc kubenswrapper[4742]: I1205 05:56:42.815154 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 05:56:42 crc kubenswrapper[4742]: I1205 05:56:42.841003 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 05:56:42 crc kubenswrapper[4742]: I1205 05:56:42.893296 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 05:56:42 crc kubenswrapper[4742]: I1205 05:56:42.920258 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 05:56:42 crc kubenswrapper[4742]: I1205 05:56:42.942528 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 05:56:43 crc kubenswrapper[4742]: I1205 05:56:43.007948 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 05:56:43 crc kubenswrapper[4742]: I1205 05:56:43.055566 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 05:56:43 crc kubenswrapper[4742]: I1205 05:56:43.204969 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 05:56:43 crc kubenswrapper[4742]: I1205 05:56:43.319924 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 05:56:43 crc kubenswrapper[4742]: I1205 05:56:43.395182 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 05:56:43 crc kubenswrapper[4742]: I1205 
05:56:43.461492 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 05:56:43 crc kubenswrapper[4742]: I1205 05:56:43.539496 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 05:56:43 crc kubenswrapper[4742]: I1205 05:56:43.706709 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 05:56:43 crc kubenswrapper[4742]: I1205 05:56:43.759436 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 05:56:43 crc kubenswrapper[4742]: I1205 05:56:43.784624 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 05:56:43 crc kubenswrapper[4742]: I1205 05:56:43.802288 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 05:56:43 crc kubenswrapper[4742]: I1205 05:56:43.842456 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.065852 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.127862 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.295418 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.404568 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.422723 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.427218 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.461405 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.580765 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.647402 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.669445 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.827994 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.882180 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 05:56:44 crc kubenswrapper[4742]: 
I1205 05:56:44.894574 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.931393 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.936000 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 05:56:44 crc kubenswrapper[4742]: I1205 05:56:44.969247 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.023203 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.091155 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.113465 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.154850 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.160386 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.168854 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.259225 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.267797 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.278632 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.479758 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.539731 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.544136 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.594133 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.687676 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.689235 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 05:56:45 crc 
kubenswrapper[4742]: I1205 05:56:45.887362 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.889988 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 05:56:45 crc kubenswrapper[4742]: I1205 05:56:45.943972 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.039358 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.133620 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.190549 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.209350 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.228762 4742 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.472693 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.473170 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.529687 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.598538 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.668050 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.669177 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.718360 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.786126 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.786386 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.794081 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.809470 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 05:56:46 crc 
kubenswrapper[4742]: I1205 05:56:46.837874 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.852838 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.902936 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.912379 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 05:56:46 crc kubenswrapper[4742]: I1205 05:56:46.913919 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.037824 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.067981 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.118081 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.173465 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.181358 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.190019 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.356649 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.442263 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.444824 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.466413 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.468555 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.593956 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.608000 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.756665 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.822616 4742 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.854983 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 05:56:47 crc kubenswrapper[4742]: I1205 05:56:47.959557 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.012939 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.049530 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.117021 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.184568 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.218869 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.225610 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.225617 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.255102 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.255290 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.274676 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.317714 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.331023 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.502348 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.557158 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.618344 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.716244 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 
05:56:48.791505 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.815230 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 05:56:48 crc kubenswrapper[4742]: I1205 05:56:48.905832 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.006304 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.073926 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.133778 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.166433 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.219324 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.241083 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.267020 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.302839 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.378432 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.395198 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.457567 4742 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.529235 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.648563 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.696842 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.742207 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.841758 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 
05:56:49.902019 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.951276 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 05:56:49 crc kubenswrapper[4742]: I1205 05:56:49.995917 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.100146 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.124009 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.158637 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.179126 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.186573 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.219317 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.329898 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.329994 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.375876 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.479449 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.500477 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.624964 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.692829 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.716289 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.757360 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.848415 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 05:56:50 crc kubenswrapper[4742]: I1205 05:56:50.977243 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" 
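
The run of entries above traces the teardown of the oauth-openshift pod's volumes through three phases that appear in this order in kubelet logs: reconciler_common.go:159 "UnmountVolume started", operation_generator.go:803 "UnmountVolume.TearDown succeeded", and reconciler_common.go:293 "Volume detached ... DevicePath \"\"". A quick way to confirm that every volume completed all three phases is to pair the messages by volume name. The standalone Go sketch below does that; it is an illustrative parser written against the exact message text shown in this log (the regexes are my assumption, not a stable kubelet format), not kubelet code.

```go
// volphases.go - a minimal sketch (not part of the kubelet) that tallies the
// three volume-teardown phases visible in this log: "UnmountVolume started",
// "TearDown succeeded", and "Volume detached". The regexes are written against
// the message text shown above and are an assumption, not a stable interface.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

var phases = map[string]*regexp.Regexp{
	// "started" and "detached" messages carry escaped quotes (\") in this dump.
	"started":  regexp.MustCompile(`UnmountVolume started for volume \\?"([^"\\]+)`),
	"detached": regexp.MustCompile(`Volume detached for volume \\?"([^"\\]+)`),
	// TearDown lines embed the volume name after the pod UID:
	// kubernetes.io/<plugin>/<uid>-<volume-name>
	"torndown": regexp.MustCompile(`UnmountVolume\.TearDown succeeded for volume "kubernetes\.io/[^/]+/[0-9a-f-]+-([^"]+)"`),
}

func main() {
	seen := map[string]map[string]bool{} // volume name -> set of observed phases
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // archived log lines can be long
	for sc.Scan() {
		for phase, re := range phases {
			if m := re.FindStringSubmatch(sc.Text()); m != nil {
				vol := m[1]
				if seen[vol] == nil {
					seen[vol] = map[string]bool{}
				}
				seen[vol][phase] = true
			}
		}
	}
	for vol, p := range seen {
		fmt.Printf("%-55s started=%v torndown=%v detached=%v\n",
			vol, p["started"], p["torndown"], p["detached"])
	}
}
```

Run as `go run volphases.go < kubelet.log`; a volume that shows started=true but detached=false is a teardown stuck between the reconciler and the volume plugin, which is the failure mode this three-phase logging exists to expose.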
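
Just after the teardown, the log records a benign delete race: scope.go "RemoveContainer" runs twice for container 02d31a7f..., the CRI runtime answers rpc NotFound on the second attempt, and pod_container_deletor.go logs "DeleteContainer returned error" even though the container is simply already gone. The usual caller-side treatment is to map gRPC NotFound to success. Below is a minimal sketch of that pattern under stated assumptions: the ContainerRemover interface and fakeRuntime are hypothetical stand-ins for a real CRI client; only the google.golang.org/grpc status/codes handling is real API.

```go
// idempotent_remove.go - a minimal sketch of treating a CRI "NotFound" as
// success when removing a container, mirroring the benign race logged above.
// ContainerRemover and fakeRuntime are hypothetical; only the gRPC
// status-code handling (google.golang.org/grpc/status, codes) is real API.
package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// ContainerRemover is a stand-in for a CRI runtime client (hypothetical).
type ContainerRemover interface {
	RemoveContainer(ctx context.Context, containerID string) error
}

// removeIfPresent deletes a container but tolerates "already gone": a
// NotFound from the runtime means another path already removed it, which
// is exactly what the kubelet entries above show.
func removeIfPresent(ctx context.Context, rt ContainerRemover, id string) error {
	err := rt.RemoveContainer(ctx, id)
	if status.Code(err) == codes.NotFound { // status.Code(nil) is codes.OK
		fmt.Printf("container %s already removed, nothing to do\n", id)
		return nil
	}
	return err // nil on success, or a real failure worth surfacing
}

// fakeRuntime simulates the runtime answering NotFound, as in the log above.
type fakeRuntime struct{}

func (fakeRuntime) RemoveContainer(ctx context.Context, id string) error {
	return status.Error(codes.NotFound, "could not find container "+id)
}

func main() {
	if err := removeIfPresent(context.Background(), fakeRuntime{}, "02d31a7f"); err != nil {
		panic(err)
	}
}
```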
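
The long run of reflector.go:368 "Caches populated" entries surrounding this point records client-go reflectors completing their initial LIST for each per-namespace Secret and ConfigMap that running pods reference; only after that does the kubelet consider those object caches usable. The kubelet maintains dedicated per-object watches internally, so the sketch below is an analogue rather than kubelet code: a standalone program using a SharedInformerFactory scoped to one namespace (openshift-authentication, taken from the entries that follow) that blocks until the same "caches populated" condition holds. Loading the kubeconfig via clientcmd.RecommendedHomeFile is an assumption for the example.

```go
// cachesync.go - a minimal analogue of the "Caches populated" entries above:
// start informers for ConfigMaps and Secrets in one namespace and block
// until their initial LIST has been stored, as the kubelet's reflectors do.
package main

import (
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Load a kubeconfig; inside a cluster one would use rest.InClusterConfig().
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}

	// Watch a single namespace, mirroring the per-namespace reflectors above.
	factory := informers.NewSharedInformerFactoryWithOptions(
		client, 10*time.Minute, informers.WithNamespace("openshift-authentication"))
	cmInformer := factory.Core().V1().ConfigMaps().Informer()
	secInformer := factory.Core().V1().Secrets().Informer()

	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop) // each informer starts a reflector: LIST, then WATCH

	// The equivalent of waiting for "Caches populated": blocks until the
	// initial LIST for every registered informer has landed in its store.
	if !cache.WaitForCacheSync(stop, cmInformer.HasSynced, secInformer.HasSynced) {
		panic("timed out waiting for caches to populate")
	}
	fmt.Println("caches populated for ConfigMaps and Secrets")
}
```

This ordering (LIST completes before consumers run) is why the new oauth-openshift-54b5c98c4-s8mpm pod's volume mounts can only proceed once the openshift-authentication cache entries below appear, and why the earlier E1205 "Failed to watch *v1.ConfigMap" error for v4-0-config-system-cliconfig resolves itself once the watch is re-established.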
Dec 05 05:56:51 crc kubenswrapper[4742]: I1205 05:56:51.018588 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 05:56:51 crc kubenswrapper[4742]: I1205 05:56:51.090434 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 05:56:51 crc kubenswrapper[4742]: I1205 05:56:51.148694 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 05:56:51 crc kubenswrapper[4742]: I1205 05:56:51.182524 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 05:56:51 crc kubenswrapper[4742]: I1205 05:56:51.206353 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 05:56:51 crc kubenswrapper[4742]: I1205 05:56:51.693784 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 05:56:51 crc kubenswrapper[4742]: I1205 05:56:51.760818 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 05:56:51 crc kubenswrapper[4742]: I1205 05:56:51.770551 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 05:56:51 crc kubenswrapper[4742]: I1205 05:56:51.788767 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 05:56:51 crc kubenswrapper[4742]: I1205 05:56:51.821022 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 05:56:51 crc kubenswrapper[4742]: I1205 05:56:51.969591 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.038334 4742 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.070821 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.103681 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.251617 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.287868 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.317352 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.319448 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.324638 4742 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.354861 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.515944 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.543452 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.550602 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.579422 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.600353 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.602446 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.616854 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.638297 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.638894 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.677241 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.841398 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.853563 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 05:56:52 crc kubenswrapper[4742]: I1205 05:56:52.919462 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 05:56:53 crc kubenswrapper[4742]: I1205 05:56:53.071628 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 05:56:53 crc kubenswrapper[4742]: I1205 05:56:53.138581 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 05:56:53 crc kubenswrapper[4742]: I1205 05:56:53.144566 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 05:56:53 crc kubenswrapper[4742]: I1205 05:56:53.176643 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 05:56:53 crc kubenswrapper[4742]: I1205 05:56:53.211782 4742 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 05:56:53 crc kubenswrapper[4742]: I1205 05:56:53.236615 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 05:56:53 crc kubenswrapper[4742]: I1205 05:56:53.261166 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 05:56:53 crc kubenswrapper[4742]: I1205 05:56:53.373761 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 05:56:53 crc kubenswrapper[4742]: I1205 05:56:53.418308 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 05:56:53 crc kubenswrapper[4742]: I1205 05:56:53.489542 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 05:56:53 crc kubenswrapper[4742]: I1205 05:56:53.546483 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 05:56:53 crc kubenswrapper[4742]: I1205 05:56:53.799894 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 05:56:53 crc kubenswrapper[4742]: I1205 05:56:53.810563 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 05:56:53 crc kubenswrapper[4742]: I1205 05:56:53.905593 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.020281 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.058743 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.156425 4742 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.162346 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.182410 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.251698 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.578810 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.591387 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.628296 4742 reflector.go:368] Caches populated for *v1.Secret from 
object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.791243 4742 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.791957 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=46.791941159 podStartE2EDuration="46.791941159s" podCreationTimestamp="2025-12-05 05:56:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:56:30.56977989 +0000 UTC m=+266.481914952" watchObservedRunningTime="2025-12-05 05:56:54.791941159 +0000 UTC m=+290.704076231" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.796280 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-45686","openshift-marketplace/redhat-operators-7kmqp","openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/redhat-marketplace-pwkx9"] Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.796358 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-54b5c98c4-s8mpm","openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 05:56:54 crc kubenswrapper[4742]: E1205 05:56:54.796544 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" containerName="extract-content" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.796564 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" containerName="extract-content" Dec 05 05:56:54 crc kubenswrapper[4742]: E1205 05:56:54.796584 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" containerName="extract-utilities" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.796594 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" containerName="extract-utilities" Dec 05 05:56:54 crc kubenswrapper[4742]: E1205 05:56:54.796603 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bb19c95-d3a5-4418-8787-82e424244073" containerName="installer" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.796611 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bb19c95-d3a5-4418-8787-82e424244073" containerName="installer" Dec 05 05:56:54 crc kubenswrapper[4742]: E1205 05:56:54.796624 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3d9ea9f-6af6-42ea-9298-2e970da2572e" containerName="oauth-openshift" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.796632 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3d9ea9f-6af6-42ea-9298-2e970da2572e" containerName="oauth-openshift" Dec 05 05:56:54 crc kubenswrapper[4742]: E1205 05:56:54.796647 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" containerName="extract-utilities" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.796656 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" containerName="extract-utilities" Dec 05 05:56:54 crc kubenswrapper[4742]: E1205 05:56:54.796670 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" 
containerName="registry-server" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.796679 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" containerName="registry-server" Dec 05 05:56:54 crc kubenswrapper[4742]: E1205 05:56:54.796692 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" containerName="extract-content" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.796701 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" containerName="extract-content" Dec 05 05:56:54 crc kubenswrapper[4742]: E1205 05:56:54.796714 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" containerName="registry-server" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.796723 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" containerName="registry-server" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.796835 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3d9ea9f-6af6-42ea-9298-2e970da2572e" containerName="oauth-openshift" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.796847 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bb19c95-d3a5-4418-8787-82e424244073" containerName="installer" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.796858 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" containerName="registry-server" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.796870 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" containerName="registry-server" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.797190 4742 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ea18901-cdec-4f7a-96c3-610bb6b9eef5" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.797236 4742 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5ea18901-cdec-4f7a-96c3-610bb6b9eef5" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.797308 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.799711 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.800022 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.800462 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.800617 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.800781 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.800923 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.801624 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.801772 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.802260 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.802326 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.802568 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.805163 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.808548 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.809195 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.809489 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.813927 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.820642 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.828230 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 
05:56:54.855866 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=24.855851232 podStartE2EDuration="24.855851232s" podCreationTimestamp="2025-12-05 05:56:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:56:54.854227946 +0000 UTC m=+290.766363008" watchObservedRunningTime="2025-12-05 05:56:54.855851232 +0000 UTC m=+290.767986294" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.940776 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.940831 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-service-ca\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.940858 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-user-template-error\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.940893 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zqd4\" (UniqueName: \"kubernetes.io/projected/493bb8ad-5f64-40b5-acba-8c2668ed364c-kube-api-access-2zqd4\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.940939 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.940967 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.940982 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.941002 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/493bb8ad-5f64-40b5-acba-8c2668ed364c-audit-dir\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.941017 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-session\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.941329 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.941472 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-router-certs\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.941532 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/493bb8ad-5f64-40b5-acba-8c2668ed364c-audit-policies\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.941551 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.941570 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:54 crc kubenswrapper[4742]: I1205 05:56:54.941615 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-user-template-login\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " 
pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.001560 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.035796 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-54b5c98c4-s8mpm"] Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.042230 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.042269 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-user-template-error\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.042287 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-service-ca\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.042306 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zqd4\" (UniqueName: \"kubernetes.io/projected/493bb8ad-5f64-40b5-acba-8c2668ed364c-kube-api-access-2zqd4\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.042336 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.042352 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.042366 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc 
kubenswrapper[4742]: I1205 05:56:55.042383 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/493bb8ad-5f64-40b5-acba-8c2668ed364c-audit-dir\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.042397 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-session\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.042414 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-router-certs\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.042439 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/493bb8ad-5f64-40b5-acba-8c2668ed364c-audit-policies\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.042457 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.042475 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.042495 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-user-template-login\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.043613 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.044219 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-service-ca\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " 
pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.044852 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.045518 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-cliconfig\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.045524 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/493bb8ad-5f64-40b5-acba-8c2668ed364c-audit-policies\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.045778 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/493bb8ad-5f64-40b5-acba-8c2668ed364c-audit-dir\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.049322 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.050917 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-router-certs\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.051743 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.058500 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-user-template-error\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: 
I1205 05:56:55.058673 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.059028 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-session\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.062435 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-user-template-login\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.062648 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/493bb8ad-5f64-40b5-acba-8c2668ed364c-v4-0-config-system-serving-cert\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.062765 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zqd4\" (UniqueName: \"kubernetes.io/projected/493bb8ad-5f64-40b5-acba-8c2668ed364c-kube-api-access-2zqd4\") pod \"oauth-openshift-54b5c98c4-s8mpm\" (UID: \"493bb8ad-5f64-40b5-acba-8c2668ed364c\") " pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.076839 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.105724 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.118851 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.127427 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.208275 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.342444 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.348021 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-54b5c98c4-s8mpm"] Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.380200 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.567656 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.645808 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.735274 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.813632 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.884301 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.894277 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" event={"ID":"493bb8ad-5f64-40b5-acba-8c2668ed364c","Type":"ContainerStarted","Data":"6a9e1f895a374b25083123042e2f1a65f338e82b26b5b63d6d96b753eb5b11c7"} Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.894325 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" event={"ID":"493bb8ad-5f64-40b5-acba-8c2668ed364c","Type":"ContainerStarted","Data":"2a666cb66579fbe7f2d8ff0bf5eaad13e98c8a8679d40975f35be867a9784ab8"} Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.894587 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.927767 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" podStartSLOduration=52.927739221 podStartE2EDuration="52.927739221s" podCreationTimestamp="2025-12-05 05:56:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:56:55.920102525 +0000 UTC m=+291.832237597" watchObservedRunningTime="2025-12-05 05:56:55.927739221 +0000 UTC m=+291.839874343" Dec 05 05:56:55 crc kubenswrapper[4742]: I1205 05:56:55.952579 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-authentication/oauth-openshift-54b5c98c4-s8mpm" Dec 05 05:56:56 crc kubenswrapper[4742]: I1205 05:56:56.174841 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 05:56:56 crc kubenswrapper[4742]: I1205 05:56:56.240429 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 05:56:56 crc kubenswrapper[4742]: I1205 05:56:56.358610 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 05:56:56 crc kubenswrapper[4742]: I1205 05:56:56.391167 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43ebbfe9-74ee-405b-82d8-d4a825a7386d" path="/var/lib/kubelet/pods/43ebbfe9-74ee-405b-82d8-d4a825a7386d/volumes" Dec 05 05:56:56 crc kubenswrapper[4742]: I1205 05:56:56.392445 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c433daac-2067-47ed-ba5c-01ae452a511d" path="/var/lib/kubelet/pods/c433daac-2067-47ed-ba5c-01ae452a511d/volumes" Dec 05 05:56:56 crc kubenswrapper[4742]: I1205 05:56:56.393362 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3d9ea9f-6af6-42ea-9298-2e970da2572e" path="/var/lib/kubelet/pods/f3d9ea9f-6af6-42ea-9298-2e970da2572e/volumes" Dec 05 05:56:56 crc kubenswrapper[4742]: I1205 05:56:56.396491 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 05:56:57 crc kubenswrapper[4742]: I1205 05:56:57.351099 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 05:56:57 crc kubenswrapper[4742]: I1205 05:56:57.547425 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 05:57:02 crc kubenswrapper[4742]: I1205 05:57:02.954241 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dpnl4"] Dec 05 05:57:02 crc kubenswrapper[4742]: I1205 05:57:02.955354 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dpnl4" podUID="8eed6205-7703-433f-83cf-d7b51867e5ee" containerName="registry-server" containerID="cri-o://deca516b9100434de9391d51109ccee090c931ca4c65fc0fb55194b6b2543c51" gracePeriod=30 Dec 05 05:57:02 crc kubenswrapper[4742]: I1205 05:57:02.965899 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kjh59"] Dec 05 05:57:02 crc kubenswrapper[4742]: I1205 05:57:02.966239 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kjh59" podUID="ff4c4d35-276e-47e9-8b12-76361e2005bf" containerName="registry-server" containerID="cri-o://6f0f0322a7bac20ced74a7b9f9dab148eaa5ec992b9a9b5d14b70312070e2260" gracePeriod=30 Dec 05 05:57:02 crc kubenswrapper[4742]: I1205 05:57:02.979751 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dfmr9"] Dec 05 05:57:02 crc kubenswrapper[4742]: I1205 05:57:02.980028 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" podUID="997634d0-c379-4978-a8a5-4da39a072ff4" containerName="marketplace-operator" containerID="cri-o://b4d563ffc52fc72b39f577ec801c1678145af630b95fecd42e8816988ba906fa" 
gracePeriod=30 Dec 05 05:57:02 crc kubenswrapper[4742]: I1205 05:57:02.989600 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-89q4d"] Dec 05 05:57:02 crc kubenswrapper[4742]: I1205 05:57:02.993290 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-89q4d" podUID="d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" containerName="registry-server" containerID="cri-o://26c4d48595677dec3b46779cce238cfdcf84d132832ed98cca8fb7f539df8776" gracePeriod=30 Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.021025 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gwx44"] Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.021387 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gwx44" podUID="04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" containerName="registry-server" containerID="cri-o://69abf936a854fbec80f87a64444ddeed3040064c9de7a164e6ad063408d1eee5" gracePeriod=30 Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.025774 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qhk9s"] Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.035682 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qhk9s"] Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.035775 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.168075 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtwg6\" (UniqueName: \"kubernetes.io/projected/1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d-kube-api-access-qtwg6\") pod \"marketplace-operator-79b997595-qhk9s\" (UID: \"1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.168138 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qhk9s\" (UID: \"1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.168166 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qhk9s\" (UID: \"1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.272257 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtwg6\" (UniqueName: \"kubernetes.io/projected/1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d-kube-api-access-qtwg6\") pod \"marketplace-operator-79b997595-qhk9s\" (UID: \"1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.272301 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qhk9s\" (UID: \"1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.272327 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qhk9s\" (UID: \"1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.274223 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qhk9s\" (UID: \"1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.279295 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qhk9s\" (UID: \"1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.289242 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtwg6\" (UniqueName: \"kubernetes.io/projected/1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d-kube-api-access-qtwg6\") pod \"marketplace-operator-79b997595-qhk9s\" (UID: \"1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.354850 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.481762 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-89q4d" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.482221 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.676684 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-utilities\") pod \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\" (UID: \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.676763 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q89wr\" (UniqueName: \"kubernetes.io/projected/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-kube-api-access-q89wr\") pod \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\" (UID: \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.676854 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-catalog-content\") pod \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\" (UID: \"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.676908 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-catalog-content\") pod \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\" (UID: \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.676967 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnmlc\" (UniqueName: \"kubernetes.io/projected/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-kube-api-access-pnmlc\") pod \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\" (UID: \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.677006 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-utilities\") pod \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\" (UID: \"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.677829 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-utilities" (OuterVolumeSpecName: "utilities") pod "04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" (UID: "04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.678637 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-utilities" (OuterVolumeSpecName: "utilities") pod "d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" (UID: "d8ed5ac0-ecdc-4f4f-a13b-223289da1f67"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.682838 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-kube-api-access-pnmlc" (OuterVolumeSpecName: "kube-api-access-pnmlc") pod "d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" (UID: "d8ed5ac0-ecdc-4f4f-a13b-223289da1f67"). InnerVolumeSpecName "kube-api-access-pnmlc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.682996 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-kube-api-access-q89wr" (OuterVolumeSpecName: "kube-api-access-q89wr") pod "04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" (UID: "04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c"). InnerVolumeSpecName "kube-api-access-q89wr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.695173 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" (UID: "d8ed5ac0-ecdc-4f4f-a13b-223289da1f67"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.740317 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qhk9s"] Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.778568 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.778607 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.778620 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnmlc\" (UniqueName: \"kubernetes.io/projected/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67-kube-api-access-pnmlc\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.778632 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.778646 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q89wr\" (UniqueName: \"kubernetes.io/projected/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-kube-api-access-q89wr\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.816877 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" (UID: "04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.837240 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dpnl4" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.859835 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.867935 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kjh59" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.880569 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.960418 4742 generic.go:334] "Generic (PLEG): container finished" podID="d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" containerID="26c4d48595677dec3b46779cce238cfdcf84d132832ed98cca8fb7f539df8776" exitCode=0 Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.960484 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-89q4d" event={"ID":"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67","Type":"ContainerDied","Data":"26c4d48595677dec3b46779cce238cfdcf84d132832ed98cca8fb7f539df8776"} Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.960519 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-89q4d" event={"ID":"d8ed5ac0-ecdc-4f4f-a13b-223289da1f67","Type":"ContainerDied","Data":"290a114c945b0a075acbd763cd261322d49ea2a763e35da2d9cad64fb3c4dbd1"} Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.960545 4742 scope.go:117] "RemoveContainer" containerID="26c4d48595677dec3b46779cce238cfdcf84d132832ed98cca8fb7f539df8776" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.960691 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-89q4d" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.968181 4742 generic.go:334] "Generic (PLEG): container finished" podID="ff4c4d35-276e-47e9-8b12-76361e2005bf" containerID="6f0f0322a7bac20ced74a7b9f9dab148eaa5ec992b9a9b5d14b70312070e2260" exitCode=0 Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.968248 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kjh59" event={"ID":"ff4c4d35-276e-47e9-8b12-76361e2005bf","Type":"ContainerDied","Data":"6f0f0322a7bac20ced74a7b9f9dab148eaa5ec992b9a9b5d14b70312070e2260"} Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.968274 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kjh59" event={"ID":"ff4c4d35-276e-47e9-8b12-76361e2005bf","Type":"ContainerDied","Data":"3fa7c9e56854e1eca485a44e9a455701762e085655f3daae3789a8d0b06e5024"} Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.968603 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kjh59" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.977620 4742 generic.go:334] "Generic (PLEG): container finished" podID="997634d0-c379-4978-a8a5-4da39a072ff4" containerID="b4d563ffc52fc72b39f577ec801c1678145af630b95fecd42e8816988ba906fa" exitCode=0 Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.977690 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.977744 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" event={"ID":"997634d0-c379-4978-a8a5-4da39a072ff4","Type":"ContainerDied","Data":"b4d563ffc52fc72b39f577ec801c1678145af630b95fecd42e8816988ba906fa"} Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.977777 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-dfmr9" event={"ID":"997634d0-c379-4978-a8a5-4da39a072ff4","Type":"ContainerDied","Data":"0c79ca110f360b0197a2ac7f545dd0a176e83f72484aaac94d2ed1b3aad090ae"} Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.981648 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/997634d0-c379-4978-a8a5-4da39a072ff4-marketplace-trusted-ca\") pod \"997634d0-c379-4978-a8a5-4da39a072ff4\" (UID: \"997634d0-c379-4978-a8a5-4da39a072ff4\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.981684 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff4c4d35-276e-47e9-8b12-76361e2005bf-utilities\") pod \"ff4c4d35-276e-47e9-8b12-76361e2005bf\" (UID: \"ff4c4d35-276e-47e9-8b12-76361e2005bf\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.981716 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgz9p\" (UniqueName: \"kubernetes.io/projected/ff4c4d35-276e-47e9-8b12-76361e2005bf-kube-api-access-pgz9p\") pod \"ff4c4d35-276e-47e9-8b12-76361e2005bf\" (UID: \"ff4c4d35-276e-47e9-8b12-76361e2005bf\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.981740 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8eed6205-7703-433f-83cf-d7b51867e5ee-catalog-content\") pod \"8eed6205-7703-433f-83cf-d7b51867e5ee\" (UID: \"8eed6205-7703-433f-83cf-d7b51867e5ee\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.981778 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/997634d0-c379-4978-a8a5-4da39a072ff4-marketplace-operator-metrics\") pod \"997634d0-c379-4978-a8a5-4da39a072ff4\" (UID: \"997634d0-c379-4978-a8a5-4da39a072ff4\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.981818 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8eed6205-7703-433f-83cf-d7b51867e5ee-utilities\") pod \"8eed6205-7703-433f-83cf-d7b51867e5ee\" (UID: \"8eed6205-7703-433f-83cf-d7b51867e5ee\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.981873 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clrzk\" (UniqueName: \"kubernetes.io/projected/997634d0-c379-4978-a8a5-4da39a072ff4-kube-api-access-clrzk\") pod \"997634d0-c379-4978-a8a5-4da39a072ff4\" (UID: \"997634d0-c379-4978-a8a5-4da39a072ff4\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.981921 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff4c4d35-276e-47e9-8b12-76361e2005bf-catalog-content\") pod 
\"ff4c4d35-276e-47e9-8b12-76361e2005bf\" (UID: \"ff4c4d35-276e-47e9-8b12-76361e2005bf\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.981968 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hn2bp\" (UniqueName: \"kubernetes.io/projected/8eed6205-7703-433f-83cf-d7b51867e5ee-kube-api-access-hn2bp\") pod \"8eed6205-7703-433f-83cf-d7b51867e5ee\" (UID: \"8eed6205-7703-433f-83cf-d7b51867e5ee\") " Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.981980 4742 scope.go:117] "RemoveContainer" containerID="d6695e144be5489e2f6725bfd4af346873154e90293f75d83b786b32b7f10e82" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.982564 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/997634d0-c379-4978-a8a5-4da39a072ff4-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "997634d0-c379-4978-a8a5-4da39a072ff4" (UID: "997634d0-c379-4978-a8a5-4da39a072ff4"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.983703 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8eed6205-7703-433f-83cf-d7b51867e5ee-utilities" (OuterVolumeSpecName: "utilities") pod "8eed6205-7703-433f-83cf-d7b51867e5ee" (UID: "8eed6205-7703-433f-83cf-d7b51867e5ee"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.984459 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" event={"ID":"1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d","Type":"ContainerStarted","Data":"94c72f21bd5966a01fa832228dd949b1426827bff00b195b593cdd73e43676b7"} Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.984503 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" event={"ID":"1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d","Type":"ContainerStarted","Data":"72a7cf30567aecdd2af6338698103418f056f20b77fb5a9b8c83b4247e05338f"} Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.984873 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff4c4d35-276e-47e9-8b12-76361e2005bf-utilities" (OuterVolumeSpecName: "utilities") pod "ff4c4d35-276e-47e9-8b12-76361e2005bf" (UID: "ff4c4d35-276e-47e9-8b12-76361e2005bf"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.984975 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.988745 4742 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/997634d0-c379-4978-a8a5-4da39a072ff4-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.988766 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff4c4d35-276e-47e9-8b12-76361e2005bf-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.988775 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8eed6205-7703-433f-83cf-d7b51867e5ee-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.993513 4742 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-qhk9s container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" start-of-body= Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.996795 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" podUID="1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.57:8080/healthz\": dial tcp 10.217.0.57:8080: connect: connection refused" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.993838 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8eed6205-7703-433f-83cf-d7b51867e5ee-kube-api-access-hn2bp" (OuterVolumeSpecName: "kube-api-access-hn2bp") pod "8eed6205-7703-433f-83cf-d7b51867e5ee" (UID: "8eed6205-7703-433f-83cf-d7b51867e5ee"). InnerVolumeSpecName "kube-api-access-hn2bp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:57:03 crc kubenswrapper[4742]: I1205 05:57:03.994712 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff4c4d35-276e-47e9-8b12-76361e2005bf-kube-api-access-pgz9p" (OuterVolumeSpecName: "kube-api-access-pgz9p") pod "ff4c4d35-276e-47e9-8b12-76361e2005bf" (UID: "ff4c4d35-276e-47e9-8b12-76361e2005bf"). InnerVolumeSpecName "kube-api-access-pgz9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.006899 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/997634d0-c379-4978-a8a5-4da39a072ff4-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "997634d0-c379-4978-a8a5-4da39a072ff4" (UID: "997634d0-c379-4978-a8a5-4da39a072ff4"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.007022 4742 generic.go:334] "Generic (PLEG): container finished" podID="8eed6205-7703-433f-83cf-d7b51867e5ee" containerID="deca516b9100434de9391d51109ccee090c931ca4c65fc0fb55194b6b2543c51" exitCode=0 Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.007140 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dpnl4" event={"ID":"8eed6205-7703-433f-83cf-d7b51867e5ee","Type":"ContainerDied","Data":"deca516b9100434de9391d51109ccee090c931ca4c65fc0fb55194b6b2543c51"} Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.007150 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dpnl4" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.007170 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dpnl4" event={"ID":"8eed6205-7703-433f-83cf-d7b51867e5ee","Type":"ContainerDied","Data":"4f2f5318006687618054386d95f70e610c71438d3a6e920b2e3162314734390f"} Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.011231 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-89q4d"] Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.012829 4742 generic.go:334] "Generic (PLEG): container finished" podID="04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" containerID="69abf936a854fbec80f87a64444ddeed3040064c9de7a164e6ad063408d1eee5" exitCode=0 Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.012875 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwx44" event={"ID":"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c","Type":"ContainerDied","Data":"69abf936a854fbec80f87a64444ddeed3040064c9de7a164e6ad063408d1eee5"} Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.012905 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gwx44" event={"ID":"04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c","Type":"ContainerDied","Data":"dec8e0c37f74bbcccbfd618083c96e0c689aeb27e761245802a8ab61ceaa4829"} Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.012988 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gwx44" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.015474 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-89q4d"] Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.015921 4742 scope.go:117] "RemoveContainer" containerID="8271a8b518c94a2c4b43c8bf04bf6af8b77f70aab282be7f508bb77b7659bc76" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.017562 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" podStartSLOduration=2.017545373 podStartE2EDuration="2.017545373s" podCreationTimestamp="2025-12-05 05:57:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:57:04.007367686 +0000 UTC m=+299.919502748" watchObservedRunningTime="2025-12-05 05:57:04.017545373 +0000 UTC m=+299.929680435" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.019234 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/997634d0-c379-4978-a8a5-4da39a072ff4-kube-api-access-clrzk" (OuterVolumeSpecName: "kube-api-access-clrzk") pod "997634d0-c379-4978-a8a5-4da39a072ff4" (UID: "997634d0-c379-4978-a8a5-4da39a072ff4"). InnerVolumeSpecName "kube-api-access-clrzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.044927 4742 scope.go:117] "RemoveContainer" containerID="26c4d48595677dec3b46779cce238cfdcf84d132832ed98cca8fb7f539df8776" Dec 05 05:57:04 crc kubenswrapper[4742]: E1205 05:57:04.045615 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26c4d48595677dec3b46779cce238cfdcf84d132832ed98cca8fb7f539df8776\": container with ID starting with 26c4d48595677dec3b46779cce238cfdcf84d132832ed98cca8fb7f539df8776 not found: ID does not exist" containerID="26c4d48595677dec3b46779cce238cfdcf84d132832ed98cca8fb7f539df8776" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.045675 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26c4d48595677dec3b46779cce238cfdcf84d132832ed98cca8fb7f539df8776"} err="failed to get container status \"26c4d48595677dec3b46779cce238cfdcf84d132832ed98cca8fb7f539df8776\": rpc error: code = NotFound desc = could not find container \"26c4d48595677dec3b46779cce238cfdcf84d132832ed98cca8fb7f539df8776\": container with ID starting with 26c4d48595677dec3b46779cce238cfdcf84d132832ed98cca8fb7f539df8776 not found: ID does not exist" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.045714 4742 scope.go:117] "RemoveContainer" containerID="d6695e144be5489e2f6725bfd4af346873154e90293f75d83b786b32b7f10e82" Dec 05 05:57:04 crc kubenswrapper[4742]: E1205 05:57:04.046470 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6695e144be5489e2f6725bfd4af346873154e90293f75d83b786b32b7f10e82\": container with ID starting with d6695e144be5489e2f6725bfd4af346873154e90293f75d83b786b32b7f10e82 not found: ID does not exist" containerID="d6695e144be5489e2f6725bfd4af346873154e90293f75d83b786b32b7f10e82" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.046551 4742 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d6695e144be5489e2f6725bfd4af346873154e90293f75d83b786b32b7f10e82"} err="failed to get container status \"d6695e144be5489e2f6725bfd4af346873154e90293f75d83b786b32b7f10e82\": rpc error: code = NotFound desc = could not find container \"d6695e144be5489e2f6725bfd4af346873154e90293f75d83b786b32b7f10e82\": container with ID starting with d6695e144be5489e2f6725bfd4af346873154e90293f75d83b786b32b7f10e82 not found: ID does not exist" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.046612 4742 scope.go:117] "RemoveContainer" containerID="8271a8b518c94a2c4b43c8bf04bf6af8b77f70aab282be7f508bb77b7659bc76" Dec 05 05:57:04 crc kubenswrapper[4742]: E1205 05:57:04.046935 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8271a8b518c94a2c4b43c8bf04bf6af8b77f70aab282be7f508bb77b7659bc76\": container with ID starting with 8271a8b518c94a2c4b43c8bf04bf6af8b77f70aab282be7f508bb77b7659bc76 not found: ID does not exist" containerID="8271a8b518c94a2c4b43c8bf04bf6af8b77f70aab282be7f508bb77b7659bc76" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.046960 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8271a8b518c94a2c4b43c8bf04bf6af8b77f70aab282be7f508bb77b7659bc76"} err="failed to get container status \"8271a8b518c94a2c4b43c8bf04bf6af8b77f70aab282be7f508bb77b7659bc76\": rpc error: code = NotFound desc = could not find container \"8271a8b518c94a2c4b43c8bf04bf6af8b77f70aab282be7f508bb77b7659bc76\": container with ID starting with 8271a8b518c94a2c4b43c8bf04bf6af8b77f70aab282be7f508bb77b7659bc76 not found: ID does not exist" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.046979 4742 scope.go:117] "RemoveContainer" containerID="6f0f0322a7bac20ced74a7b9f9dab148eaa5ec992b9a9b5d14b70312070e2260" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.051827 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gwx44"] Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.056112 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gwx44"] Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.060751 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8eed6205-7703-433f-83cf-d7b51867e5ee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8eed6205-7703-433f-83cf-d7b51867e5ee" (UID: "8eed6205-7703-433f-83cf-d7b51867e5ee"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.063803 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff4c4d35-276e-47e9-8b12-76361e2005bf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ff4c4d35-276e-47e9-8b12-76361e2005bf" (UID: "ff4c4d35-276e-47e9-8b12-76361e2005bf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.074423 4742 scope.go:117] "RemoveContainer" containerID="8eb92da410396a44be153a27aff4ba6fd67851ca7f10e2a41c7943fab1729219" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.090116 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hn2bp\" (UniqueName: \"kubernetes.io/projected/8eed6205-7703-433f-83cf-d7b51867e5ee-kube-api-access-hn2bp\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.090145 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgz9p\" (UniqueName: \"kubernetes.io/projected/ff4c4d35-276e-47e9-8b12-76361e2005bf-kube-api-access-pgz9p\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.090154 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8eed6205-7703-433f-83cf-d7b51867e5ee-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.090166 4742 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/997634d0-c379-4978-a8a5-4da39a072ff4-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.090178 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clrzk\" (UniqueName: \"kubernetes.io/projected/997634d0-c379-4978-a8a5-4da39a072ff4-kube-api-access-clrzk\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.090323 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff4c4d35-276e-47e9-8b12-76361e2005bf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.109719 4742 scope.go:117] "RemoveContainer" containerID="9e73b614ad427cfab372b9af0d28172ad095ca0be98cea2314c28dd7a960d7bd" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.123451 4742 scope.go:117] "RemoveContainer" containerID="6f0f0322a7bac20ced74a7b9f9dab148eaa5ec992b9a9b5d14b70312070e2260" Dec 05 05:57:04 crc kubenswrapper[4742]: E1205 05:57:04.123869 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f0f0322a7bac20ced74a7b9f9dab148eaa5ec992b9a9b5d14b70312070e2260\": container with ID starting with 6f0f0322a7bac20ced74a7b9f9dab148eaa5ec992b9a9b5d14b70312070e2260 not found: ID does not exist" containerID="6f0f0322a7bac20ced74a7b9f9dab148eaa5ec992b9a9b5d14b70312070e2260" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.123920 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f0f0322a7bac20ced74a7b9f9dab148eaa5ec992b9a9b5d14b70312070e2260"} err="failed to get container status \"6f0f0322a7bac20ced74a7b9f9dab148eaa5ec992b9a9b5d14b70312070e2260\": rpc error: code = NotFound desc = could not find container \"6f0f0322a7bac20ced74a7b9f9dab148eaa5ec992b9a9b5d14b70312070e2260\": container with ID starting with 6f0f0322a7bac20ced74a7b9f9dab148eaa5ec992b9a9b5d14b70312070e2260 not found: ID does not exist" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.123949 4742 scope.go:117] "RemoveContainer" containerID="8eb92da410396a44be153a27aff4ba6fd67851ca7f10e2a41c7943fab1729219" Dec 05 05:57:04 crc kubenswrapper[4742]: 
E1205 05:57:04.124408 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8eb92da410396a44be153a27aff4ba6fd67851ca7f10e2a41c7943fab1729219\": container with ID starting with 8eb92da410396a44be153a27aff4ba6fd67851ca7f10e2a41c7943fab1729219 not found: ID does not exist" containerID="8eb92da410396a44be153a27aff4ba6fd67851ca7f10e2a41c7943fab1729219" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.124430 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8eb92da410396a44be153a27aff4ba6fd67851ca7f10e2a41c7943fab1729219"} err="failed to get container status \"8eb92da410396a44be153a27aff4ba6fd67851ca7f10e2a41c7943fab1729219\": rpc error: code = NotFound desc = could not find container \"8eb92da410396a44be153a27aff4ba6fd67851ca7f10e2a41c7943fab1729219\": container with ID starting with 8eb92da410396a44be153a27aff4ba6fd67851ca7f10e2a41c7943fab1729219 not found: ID does not exist" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.124445 4742 scope.go:117] "RemoveContainer" containerID="9e73b614ad427cfab372b9af0d28172ad095ca0be98cea2314c28dd7a960d7bd" Dec 05 05:57:04 crc kubenswrapper[4742]: E1205 05:57:04.124726 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e73b614ad427cfab372b9af0d28172ad095ca0be98cea2314c28dd7a960d7bd\": container with ID starting with 9e73b614ad427cfab372b9af0d28172ad095ca0be98cea2314c28dd7a960d7bd not found: ID does not exist" containerID="9e73b614ad427cfab372b9af0d28172ad095ca0be98cea2314c28dd7a960d7bd" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.124744 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e73b614ad427cfab372b9af0d28172ad095ca0be98cea2314c28dd7a960d7bd"} err="failed to get container status \"9e73b614ad427cfab372b9af0d28172ad095ca0be98cea2314c28dd7a960d7bd\": rpc error: code = NotFound desc = could not find container \"9e73b614ad427cfab372b9af0d28172ad095ca0be98cea2314c28dd7a960d7bd\": container with ID starting with 9e73b614ad427cfab372b9af0d28172ad095ca0be98cea2314c28dd7a960d7bd not found: ID does not exist" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.124758 4742 scope.go:117] "RemoveContainer" containerID="b4d563ffc52fc72b39f577ec801c1678145af630b95fecd42e8816988ba906fa" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.136642 4742 scope.go:117] "RemoveContainer" containerID="b4d563ffc52fc72b39f577ec801c1678145af630b95fecd42e8816988ba906fa" Dec 05 05:57:04 crc kubenswrapper[4742]: E1205 05:57:04.136903 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4d563ffc52fc72b39f577ec801c1678145af630b95fecd42e8816988ba906fa\": container with ID starting with b4d563ffc52fc72b39f577ec801c1678145af630b95fecd42e8816988ba906fa not found: ID does not exist" containerID="b4d563ffc52fc72b39f577ec801c1678145af630b95fecd42e8816988ba906fa" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.136928 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4d563ffc52fc72b39f577ec801c1678145af630b95fecd42e8816988ba906fa"} err="failed to get container status \"b4d563ffc52fc72b39f577ec801c1678145af630b95fecd42e8816988ba906fa\": rpc error: code = NotFound desc = could not find container \"b4d563ffc52fc72b39f577ec801c1678145af630b95fecd42e8816988ba906fa\": container 
with ID starting with b4d563ffc52fc72b39f577ec801c1678145af630b95fecd42e8816988ba906fa not found: ID does not exist" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.136946 4742 scope.go:117] "RemoveContainer" containerID="deca516b9100434de9391d51109ccee090c931ca4c65fc0fb55194b6b2543c51" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.151630 4742 scope.go:117] "RemoveContainer" containerID="f566e5722527c38bbaebc7ebba10bdb1da9b2a042fce81eafe2fbed1b6499f8e" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.165868 4742 scope.go:117] "RemoveContainer" containerID="6473ab8a47f5feeb54252d32d91513201c9acf8e5c5cdb42530637a361cf8408" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.230047 4742 scope.go:117] "RemoveContainer" containerID="deca516b9100434de9391d51109ccee090c931ca4c65fc0fb55194b6b2543c51" Dec 05 05:57:04 crc kubenswrapper[4742]: E1205 05:57:04.230973 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"deca516b9100434de9391d51109ccee090c931ca4c65fc0fb55194b6b2543c51\": container with ID starting with deca516b9100434de9391d51109ccee090c931ca4c65fc0fb55194b6b2543c51 not found: ID does not exist" containerID="deca516b9100434de9391d51109ccee090c931ca4c65fc0fb55194b6b2543c51" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.231024 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"deca516b9100434de9391d51109ccee090c931ca4c65fc0fb55194b6b2543c51"} err="failed to get container status \"deca516b9100434de9391d51109ccee090c931ca4c65fc0fb55194b6b2543c51\": rpc error: code = NotFound desc = could not find container \"deca516b9100434de9391d51109ccee090c931ca4c65fc0fb55194b6b2543c51\": container with ID starting with deca516b9100434de9391d51109ccee090c931ca4c65fc0fb55194b6b2543c51 not found: ID does not exist" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.231076 4742 scope.go:117] "RemoveContainer" containerID="f566e5722527c38bbaebc7ebba10bdb1da9b2a042fce81eafe2fbed1b6499f8e" Dec 05 05:57:04 crc kubenswrapper[4742]: E1205 05:57:04.231721 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f566e5722527c38bbaebc7ebba10bdb1da9b2a042fce81eafe2fbed1b6499f8e\": container with ID starting with f566e5722527c38bbaebc7ebba10bdb1da9b2a042fce81eafe2fbed1b6499f8e not found: ID does not exist" containerID="f566e5722527c38bbaebc7ebba10bdb1da9b2a042fce81eafe2fbed1b6499f8e" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.231813 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f566e5722527c38bbaebc7ebba10bdb1da9b2a042fce81eafe2fbed1b6499f8e"} err="failed to get container status \"f566e5722527c38bbaebc7ebba10bdb1da9b2a042fce81eafe2fbed1b6499f8e\": rpc error: code = NotFound desc = could not find container \"f566e5722527c38bbaebc7ebba10bdb1da9b2a042fce81eafe2fbed1b6499f8e\": container with ID starting with f566e5722527c38bbaebc7ebba10bdb1da9b2a042fce81eafe2fbed1b6499f8e not found: ID does not exist" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.231833 4742 scope.go:117] "RemoveContainer" containerID="6473ab8a47f5feeb54252d32d91513201c9acf8e5c5cdb42530637a361cf8408" Dec 05 05:57:04 crc kubenswrapper[4742]: E1205 05:57:04.232551 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6473ab8a47f5feeb54252d32d91513201c9acf8e5c5cdb42530637a361cf8408\": container with ID starting with 6473ab8a47f5feeb54252d32d91513201c9acf8e5c5cdb42530637a361cf8408 not found: ID does not exist" containerID="6473ab8a47f5feeb54252d32d91513201c9acf8e5c5cdb42530637a361cf8408" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.232591 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6473ab8a47f5feeb54252d32d91513201c9acf8e5c5cdb42530637a361cf8408"} err="failed to get container status \"6473ab8a47f5feeb54252d32d91513201c9acf8e5c5cdb42530637a361cf8408\": rpc error: code = NotFound desc = could not find container \"6473ab8a47f5feeb54252d32d91513201c9acf8e5c5cdb42530637a361cf8408\": container with ID starting with 6473ab8a47f5feeb54252d32d91513201c9acf8e5c5cdb42530637a361cf8408 not found: ID does not exist" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.232629 4742 scope.go:117] "RemoveContainer" containerID="69abf936a854fbec80f87a64444ddeed3040064c9de7a164e6ad063408d1eee5" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.246133 4742 scope.go:117] "RemoveContainer" containerID="e36703c20bafcf893c5fa079003def09702fc6b0c65171a262ec9b5b9fd3fbc8" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.265988 4742 scope.go:117] "RemoveContainer" containerID="29fd0f4fdb9d682e2f86d4c9a064b30b672586ac8c98af56ef1ed82dbc8e97b9" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.282497 4742 scope.go:117] "RemoveContainer" containerID="69abf936a854fbec80f87a64444ddeed3040064c9de7a164e6ad063408d1eee5" Dec 05 05:57:04 crc kubenswrapper[4742]: E1205 05:57:04.283743 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69abf936a854fbec80f87a64444ddeed3040064c9de7a164e6ad063408d1eee5\": container with ID starting with 69abf936a854fbec80f87a64444ddeed3040064c9de7a164e6ad063408d1eee5 not found: ID does not exist" containerID="69abf936a854fbec80f87a64444ddeed3040064c9de7a164e6ad063408d1eee5" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.283802 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69abf936a854fbec80f87a64444ddeed3040064c9de7a164e6ad063408d1eee5"} err="failed to get container status \"69abf936a854fbec80f87a64444ddeed3040064c9de7a164e6ad063408d1eee5\": rpc error: code = NotFound desc = could not find container \"69abf936a854fbec80f87a64444ddeed3040064c9de7a164e6ad063408d1eee5\": container with ID starting with 69abf936a854fbec80f87a64444ddeed3040064c9de7a164e6ad063408d1eee5 not found: ID does not exist" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.283839 4742 scope.go:117] "RemoveContainer" containerID="e36703c20bafcf893c5fa079003def09702fc6b0c65171a262ec9b5b9fd3fbc8" Dec 05 05:57:04 crc kubenswrapper[4742]: E1205 05:57:04.284706 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e36703c20bafcf893c5fa079003def09702fc6b0c65171a262ec9b5b9fd3fbc8\": container with ID starting with e36703c20bafcf893c5fa079003def09702fc6b0c65171a262ec9b5b9fd3fbc8 not found: ID does not exist" containerID="e36703c20bafcf893c5fa079003def09702fc6b0c65171a262ec9b5b9fd3fbc8" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.284747 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e36703c20bafcf893c5fa079003def09702fc6b0c65171a262ec9b5b9fd3fbc8"} err="failed to get container status 
\"e36703c20bafcf893c5fa079003def09702fc6b0c65171a262ec9b5b9fd3fbc8\": rpc error: code = NotFound desc = could not find container \"e36703c20bafcf893c5fa079003def09702fc6b0c65171a262ec9b5b9fd3fbc8\": container with ID starting with e36703c20bafcf893c5fa079003def09702fc6b0c65171a262ec9b5b9fd3fbc8 not found: ID does not exist" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.284768 4742 scope.go:117] "RemoveContainer" containerID="29fd0f4fdb9d682e2f86d4c9a064b30b672586ac8c98af56ef1ed82dbc8e97b9" Dec 05 05:57:04 crc kubenswrapper[4742]: E1205 05:57:04.285268 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29fd0f4fdb9d682e2f86d4c9a064b30b672586ac8c98af56ef1ed82dbc8e97b9\": container with ID starting with 29fd0f4fdb9d682e2f86d4c9a064b30b672586ac8c98af56ef1ed82dbc8e97b9 not found: ID does not exist" containerID="29fd0f4fdb9d682e2f86d4c9a064b30b672586ac8c98af56ef1ed82dbc8e97b9" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.285335 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29fd0f4fdb9d682e2f86d4c9a064b30b672586ac8c98af56ef1ed82dbc8e97b9"} err="failed to get container status \"29fd0f4fdb9d682e2f86d4c9a064b30b672586ac8c98af56ef1ed82dbc8e97b9\": rpc error: code = NotFound desc = could not find container \"29fd0f4fdb9d682e2f86d4c9a064b30b672586ac8c98af56ef1ed82dbc8e97b9\": container with ID starting with 29fd0f4fdb9d682e2f86d4c9a064b30b672586ac8c98af56ef1ed82dbc8e97b9 not found: ID does not exist" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.317768 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kjh59"] Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.340264 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kjh59"] Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.343723 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dfmr9"] Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.347292 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-dfmr9"] Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.361354 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dpnl4"] Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.365526 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dpnl4"] Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.397561 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" path="/var/lib/kubelet/pods/04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c/volumes" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.402034 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8eed6205-7703-433f-83cf-d7b51867e5ee" path="/var/lib/kubelet/pods/8eed6205-7703-433f-83cf-d7b51867e5ee/volumes" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.402690 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="997634d0-c379-4978-a8a5-4da39a072ff4" path="/var/lib/kubelet/pods/997634d0-c379-4978-a8a5-4da39a072ff4/volumes" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.403633 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" 
path="/var/lib/kubelet/pods/d8ed5ac0-ecdc-4f4f-a13b-223289da1f67/volumes" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.404895 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff4c4d35-276e-47e9-8b12-76361e2005bf" path="/var/lib/kubelet/pods/ff4c4d35-276e-47e9-8b12-76361e2005bf/volumes" Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.593788 4742 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 05:57:04 crc kubenswrapper[4742]: I1205 05:57:04.594463 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://3c67e59b790f6e0c7fbf1efc906a10b3237cd9e88d26eaa41efdf19f51b5772f" gracePeriod=5 Dec 05 05:57:05 crc kubenswrapper[4742]: I1205 05:57:05.021126 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-qhk9s" Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.707224 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.707817 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.779049 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.779156 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.779181 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.779220 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.779283 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.779317 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). 
InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.779385 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.779440 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.779465 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.779738 4742 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.779757 4742 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.779768 4742 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.779780 4742 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.789572 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 05:57:09 crc kubenswrapper[4742]: I1205 05:57:09.880441 4742 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:10 crc kubenswrapper[4742]: I1205 05:57:10.050503 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 05:57:10 crc kubenswrapper[4742]: I1205 05:57:10.050553 4742 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="3c67e59b790f6e0c7fbf1efc906a10b3237cd9e88d26eaa41efdf19f51b5772f" exitCode=137 Dec 05 05:57:10 crc kubenswrapper[4742]: I1205 05:57:10.050590 4742 scope.go:117] "RemoveContainer" containerID="3c67e59b790f6e0c7fbf1efc906a10b3237cd9e88d26eaa41efdf19f51b5772f" Dec 05 05:57:10 crc kubenswrapper[4742]: I1205 05:57:10.050642 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 05:57:10 crc kubenswrapper[4742]: I1205 05:57:10.075585 4742 scope.go:117] "RemoveContainer" containerID="3c67e59b790f6e0c7fbf1efc906a10b3237cd9e88d26eaa41efdf19f51b5772f" Dec 05 05:57:10 crc kubenswrapper[4742]: E1205 05:57:10.075979 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c67e59b790f6e0c7fbf1efc906a10b3237cd9e88d26eaa41efdf19f51b5772f\": container with ID starting with 3c67e59b790f6e0c7fbf1efc906a10b3237cd9e88d26eaa41efdf19f51b5772f not found: ID does not exist" containerID="3c67e59b790f6e0c7fbf1efc906a10b3237cd9e88d26eaa41efdf19f51b5772f" Dec 05 05:57:10 crc kubenswrapper[4742]: I1205 05:57:10.076016 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c67e59b790f6e0c7fbf1efc906a10b3237cd9e88d26eaa41efdf19f51b5772f"} err="failed to get container status \"3c67e59b790f6e0c7fbf1efc906a10b3237cd9e88d26eaa41efdf19f51b5772f\": rpc error: code = NotFound desc = could not find container \"3c67e59b790f6e0c7fbf1efc906a10b3237cd9e88d26eaa41efdf19f51b5772f\": container with ID starting with 3c67e59b790f6e0c7fbf1efc906a10b3237cd9e88d26eaa41efdf19f51b5772f not found: ID does not exist" Dec 05 05:57:10 crc kubenswrapper[4742]: I1205 05:57:10.391168 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 05 05:57:10 crc kubenswrapper[4742]: I1205 05:57:10.391748 4742 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 05 05:57:10 crc kubenswrapper[4742]: I1205 05:57:10.402365 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 05:57:10 crc kubenswrapper[4742]: I1205 05:57:10.402420 4742 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="70fea950-1f85-4982-8a57-2db0a1a43224" Dec 05 05:57:10 crc kubenswrapper[4742]: I1205 05:57:10.405879 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 05:57:10 crc kubenswrapper[4742]: I1205 
05:57:10.405920 4742 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="70fea950-1f85-4982-8a57-2db0a1a43224" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.184894 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t26cm"] Dec 05 05:57:16 crc kubenswrapper[4742]: E1205 05:57:16.185662 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.185678 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 05:57:16 crc kubenswrapper[4742]: E1205 05:57:16.185690 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff4c4d35-276e-47e9-8b12-76361e2005bf" containerName="extract-content" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.185698 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff4c4d35-276e-47e9-8b12-76361e2005bf" containerName="extract-content" Dec 05 05:57:16 crc kubenswrapper[4742]: E1205 05:57:16.185712 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" containerName="registry-server" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.185720 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" containerName="registry-server" Dec 05 05:57:16 crc kubenswrapper[4742]: E1205 05:57:16.185728 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff4c4d35-276e-47e9-8b12-76361e2005bf" containerName="extract-utilities" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.185735 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff4c4d35-276e-47e9-8b12-76361e2005bf" containerName="extract-utilities" Dec 05 05:57:16 crc kubenswrapper[4742]: E1205 05:57:16.185746 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" containerName="extract-utilities" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.185753 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" containerName="extract-utilities" Dec 05 05:57:16 crc kubenswrapper[4742]: E1205 05:57:16.185763 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" containerName="extract-utilities" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.185770 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" containerName="extract-utilities" Dec 05 05:57:16 crc kubenswrapper[4742]: E1205 05:57:16.185780 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" containerName="extract-content" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.185787 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" containerName="extract-content" Dec 05 05:57:16 crc kubenswrapper[4742]: E1205 05:57:16.185798 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eed6205-7703-433f-83cf-d7b51867e5ee" containerName="extract-utilities" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.185804 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eed6205-7703-433f-83cf-d7b51867e5ee" 
containerName="extract-utilities" Dec 05 05:57:16 crc kubenswrapper[4742]: E1205 05:57:16.185814 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eed6205-7703-433f-83cf-d7b51867e5ee" containerName="extract-content" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.185820 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eed6205-7703-433f-83cf-d7b51867e5ee" containerName="extract-content" Dec 05 05:57:16 crc kubenswrapper[4742]: E1205 05:57:16.185834 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff4c4d35-276e-47e9-8b12-76361e2005bf" containerName="registry-server" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.185842 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff4c4d35-276e-47e9-8b12-76361e2005bf" containerName="registry-server" Dec 05 05:57:16 crc kubenswrapper[4742]: E1205 05:57:16.185853 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" containerName="registry-server" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.185860 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" containerName="registry-server" Dec 05 05:57:16 crc kubenswrapper[4742]: E1205 05:57:16.185869 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="997634d0-c379-4978-a8a5-4da39a072ff4" containerName="marketplace-operator" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.185876 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="997634d0-c379-4978-a8a5-4da39a072ff4" containerName="marketplace-operator" Dec 05 05:57:16 crc kubenswrapper[4742]: E1205 05:57:16.185884 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" containerName="extract-content" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.185891 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" containerName="extract-content" Dec 05 05:57:16 crc kubenswrapper[4742]: E1205 05:57:16.185900 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eed6205-7703-433f-83cf-d7b51867e5ee" containerName="registry-server" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.185907 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eed6205-7703-433f-83cf-d7b51867e5ee" containerName="registry-server" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.186000 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="04d84f7d-6c91-44e2-8dfe-6f1f69d1b12c" containerName="registry-server" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.186014 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="8eed6205-7703-433f-83cf-d7b51867e5ee" containerName="registry-server" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.186025 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff4c4d35-276e-47e9-8b12-76361e2005bf" containerName="registry-server" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.186035 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8ed5ac0-ecdc-4f4f-a13b-223289da1f67" containerName="registry-server" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.186042 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="997634d0-c379-4978-a8a5-4da39a072ff4" containerName="marketplace-operator" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.186057 4742 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.186854 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.189564 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.199252 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t26cm"] Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.281803 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96fc5c33-e057-48e7-9e20-3b8860f09a1f-utilities\") pod \"certified-operators-t26cm\" (UID: \"96fc5c33-e057-48e7-9e20-3b8860f09a1f\") " pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.282095 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96fc5c33-e057-48e7-9e20-3b8860f09a1f-catalog-content\") pod \"certified-operators-t26cm\" (UID: \"96fc5c33-e057-48e7-9e20-3b8860f09a1f\") " pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.282245 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92x4z\" (UniqueName: \"kubernetes.io/projected/96fc5c33-e057-48e7-9e20-3b8860f09a1f-kube-api-access-92x4z\") pod \"certified-operators-t26cm\" (UID: \"96fc5c33-e057-48e7-9e20-3b8860f09a1f\") " pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.382989 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96fc5c33-e057-48e7-9e20-3b8860f09a1f-utilities\") pod \"certified-operators-t26cm\" (UID: \"96fc5c33-e057-48e7-9e20-3b8860f09a1f\") " pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.383276 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96fc5c33-e057-48e7-9e20-3b8860f09a1f-catalog-content\") pod \"certified-operators-t26cm\" (UID: \"96fc5c33-e057-48e7-9e20-3b8860f09a1f\") " pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.383356 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92x4z\" (UniqueName: \"kubernetes.io/projected/96fc5c33-e057-48e7-9e20-3b8860f09a1f-kube-api-access-92x4z\") pod \"certified-operators-t26cm\" (UID: \"96fc5c33-e057-48e7-9e20-3b8860f09a1f\") " pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.383674 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/96fc5c33-e057-48e7-9e20-3b8860f09a1f-utilities\") pod \"certified-operators-t26cm\" (UID: \"96fc5c33-e057-48e7-9e20-3b8860f09a1f\") " pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.383973 4742 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/96fc5c33-e057-48e7-9e20-3b8860f09a1f-catalog-content\") pod \"certified-operators-t26cm\" (UID: \"96fc5c33-e057-48e7-9e20-3b8860f09a1f\") " pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.398617 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dms6l"] Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.400660 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dms6l" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.403155 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.404487 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92x4z\" (UniqueName: \"kubernetes.io/projected/96fc5c33-e057-48e7-9e20-3b8860f09a1f-kube-api-access-92x4z\") pod \"certified-operators-t26cm\" (UID: \"96fc5c33-e057-48e7-9e20-3b8860f09a1f\") " pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.405909 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dms6l"] Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.484866 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4265f02-a9b1-4e0d-b568-e928700ff3f6-catalog-content\") pod \"community-operators-dms6l\" (UID: \"e4265f02-a9b1-4e0d-b568-e928700ff3f6\") " pod="openshift-marketplace/community-operators-dms6l" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.485245 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4265f02-a9b1-4e0d-b568-e928700ff3f6-utilities\") pod \"community-operators-dms6l\" (UID: \"e4265f02-a9b1-4e0d-b568-e928700ff3f6\") " pod="openshift-marketplace/community-operators-dms6l" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.485359 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcm22\" (UniqueName: \"kubernetes.io/projected/e4265f02-a9b1-4e0d-b568-e928700ff3f6-kube-api-access-hcm22\") pod \"community-operators-dms6l\" (UID: \"e4265f02-a9b1-4e0d-b568-e928700ff3f6\") " pod="openshift-marketplace/community-operators-dms6l" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.507719 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.586612 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4265f02-a9b1-4e0d-b568-e928700ff3f6-catalog-content\") pod \"community-operators-dms6l\" (UID: \"e4265f02-a9b1-4e0d-b568-e928700ff3f6\") " pod="openshift-marketplace/community-operators-dms6l" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.586662 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4265f02-a9b1-4e0d-b568-e928700ff3f6-utilities\") pod \"community-operators-dms6l\" (UID: \"e4265f02-a9b1-4e0d-b568-e928700ff3f6\") " pod="openshift-marketplace/community-operators-dms6l" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.586714 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcm22\" (UniqueName: \"kubernetes.io/projected/e4265f02-a9b1-4e0d-b568-e928700ff3f6-kube-api-access-hcm22\") pod \"community-operators-dms6l\" (UID: \"e4265f02-a9b1-4e0d-b568-e928700ff3f6\") " pod="openshift-marketplace/community-operators-dms6l" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.587162 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4265f02-a9b1-4e0d-b568-e928700ff3f6-utilities\") pod \"community-operators-dms6l\" (UID: \"e4265f02-a9b1-4e0d-b568-e928700ff3f6\") " pod="openshift-marketplace/community-operators-dms6l" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.587245 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4265f02-a9b1-4e0d-b568-e928700ff3f6-catalog-content\") pod \"community-operators-dms6l\" (UID: \"e4265f02-a9b1-4e0d-b568-e928700ff3f6\") " pod="openshift-marketplace/community-operators-dms6l" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.604538 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcm22\" (UniqueName: \"kubernetes.io/projected/e4265f02-a9b1-4e0d-b568-e928700ff3f6-kube-api-access-hcm22\") pod \"community-operators-dms6l\" (UID: \"e4265f02-a9b1-4e0d-b568-e928700ff3f6\") " pod="openshift-marketplace/community-operators-dms6l" Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.750312 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dms6l"
Dec 05 05:57:16 crc kubenswrapper[4742]: W1205 05:57:16.913329 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4265f02_a9b1_4e0d_b568_e928700ff3f6.slice/crio-7d80c48eb7490bed3727db8e3481096ba834e0f488048a3d8675daf17eeecd60 WatchSource:0}: Error finding container 7d80c48eb7490bed3727db8e3481096ba834e0f488048a3d8675daf17eeecd60: Status 404 returned error can't find the container with id 7d80c48eb7490bed3727db8e3481096ba834e0f488048a3d8675daf17eeecd60
Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.914461 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dms6l"]
Dec 05 05:57:16 crc kubenswrapper[4742]: I1205 05:57:16.925032 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t26cm"]
Dec 05 05:57:16 crc kubenswrapper[4742]: W1205 05:57:16.936170 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96fc5c33_e057_48e7_9e20_3b8860f09a1f.slice/crio-94158fa4511c9fba515023a2d9cc87c20c40155cc62d7c7789604442ffb69fc2 WatchSource:0}: Error finding container 94158fa4511c9fba515023a2d9cc87c20c40155cc62d7c7789604442ffb69fc2: Status 404 returned error can't find the container with id 94158fa4511c9fba515023a2d9cc87c20c40155cc62d7c7789604442ffb69fc2
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.088909 4742 generic.go:334] "Generic (PLEG): container finished" podID="96fc5c33-e057-48e7-9e20-3b8860f09a1f" containerID="004a58f0812cf13a78e54e5105c9a1120839bde0a8b504faccbbb9d5aac21898" exitCode=0
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.088954 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t26cm" event={"ID":"96fc5c33-e057-48e7-9e20-3b8860f09a1f","Type":"ContainerDied","Data":"004a58f0812cf13a78e54e5105c9a1120839bde0a8b504faccbbb9d5aac21898"}
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.088993 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t26cm" event={"ID":"96fc5c33-e057-48e7-9e20-3b8860f09a1f","Type":"ContainerStarted","Data":"94158fa4511c9fba515023a2d9cc87c20c40155cc62d7c7789604442ffb69fc2"}
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.090078 4742 generic.go:334] "Generic (PLEG): container finished" podID="e4265f02-a9b1-4e0d-b568-e928700ff3f6" containerID="c779f84f2b7dd7cfa3d34da9a4cdb3b3649e117a1ed05dc1d103a3f8fa824323" exitCode=0
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.090117 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dms6l" event={"ID":"e4265f02-a9b1-4e0d-b568-e928700ff3f6","Type":"ContainerDied","Data":"c779f84f2b7dd7cfa3d34da9a4cdb3b3649e117a1ed05dc1d103a3f8fa824323"}
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.090152 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dms6l" event={"ID":"e4265f02-a9b1-4e0d-b568-e928700ff3f6","Type":"ContainerStarted","Data":"7d80c48eb7490bed3727db8e3481096ba834e0f488048a3d8675daf17eeecd60"}
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.287371 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vfstf"]
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.287648 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" podUID="a302e9ed-44a5-41e8-8e91-c37771dca329" containerName="controller-manager" containerID="cri-o://e2b2237464488a3a85672795a12d4413a69b6eee6202ee5db9aa722ebeba7bf0" gracePeriod=30
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.356801 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c"]
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.357397 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" podUID="0c7188d0-4020-4749-8bd6-98b637ce3f3c" containerName="route-controller-manager" containerID="cri-o://abe36ba16e7322e6e35e3ab2d4c8d5522eb63369ba15fd346b473da48bf29f06" gracePeriod=30
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.625550 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf"
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.700078 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-client-ca\") pod \"a302e9ed-44a5-41e8-8e91-c37771dca329\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") "
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.700145 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcwxk\" (UniqueName: \"kubernetes.io/projected/a302e9ed-44a5-41e8-8e91-c37771dca329-kube-api-access-wcwxk\") pod \"a302e9ed-44a5-41e8-8e91-c37771dca329\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") "
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.700176 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-config\") pod \"a302e9ed-44a5-41e8-8e91-c37771dca329\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") "
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.700209 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a302e9ed-44a5-41e8-8e91-c37771dca329-serving-cert\") pod \"a302e9ed-44a5-41e8-8e91-c37771dca329\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") "
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.700236 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-proxy-ca-bundles\") pod \"a302e9ed-44a5-41e8-8e91-c37771dca329\" (UID: \"a302e9ed-44a5-41e8-8e91-c37771dca329\") "
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.701186 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-config" (OuterVolumeSpecName: "config") pod "a302e9ed-44a5-41e8-8e91-c37771dca329" (UID: "a302e9ed-44a5-41e8-8e91-c37771dca329"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.701207 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-client-ca" (OuterVolumeSpecName: "client-ca") pod "a302e9ed-44a5-41e8-8e91-c37771dca329" (UID: "a302e9ed-44a5-41e8-8e91-c37771dca329"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.701225 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "a302e9ed-44a5-41e8-8e91-c37771dca329" (UID: "a302e9ed-44a5-41e8-8e91-c37771dca329"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.702917 4742 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.702944 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-config\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.702957 4742 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a302e9ed-44a5-41e8-8e91-c37771dca329-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.719479 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a302e9ed-44a5-41e8-8e91-c37771dca329-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a302e9ed-44a5-41e8-8e91-c37771dca329" (UID: "a302e9ed-44a5-41e8-8e91-c37771dca329"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.719734 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a302e9ed-44a5-41e8-8e91-c37771dca329-kube-api-access-wcwxk" (OuterVolumeSpecName: "kube-api-access-wcwxk") pod "a302e9ed-44a5-41e8-8e91-c37771dca329" (UID: "a302e9ed-44a5-41e8-8e91-c37771dca329"). InnerVolumeSpecName "kube-api-access-wcwxk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.736753 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c"
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.803814 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c7188d0-4020-4749-8bd6-98b637ce3f3c-client-ca\") pod \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") "
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.804195 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c7188d0-4020-4749-8bd6-98b637ce3f3c-serving-cert\") pod \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") "
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.804226 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fx4jb\" (UniqueName: \"kubernetes.io/projected/0c7188d0-4020-4749-8bd6-98b637ce3f3c-kube-api-access-fx4jb\") pod \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") "
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.804278 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c7188d0-4020-4749-8bd6-98b637ce3f3c-config\") pod \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\" (UID: \"0c7188d0-4020-4749-8bd6-98b637ce3f3c\") "
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.804494 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcwxk\" (UniqueName: \"kubernetes.io/projected/a302e9ed-44a5-41e8-8e91-c37771dca329-kube-api-access-wcwxk\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.804511 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a302e9ed-44a5-41e8-8e91-c37771dca329-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.804974 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c7188d0-4020-4749-8bd6-98b637ce3f3c-client-ca" (OuterVolumeSpecName: "client-ca") pod "0c7188d0-4020-4749-8bd6-98b637ce3f3c" (UID: "0c7188d0-4020-4749-8bd6-98b637ce3f3c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.804987 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c7188d0-4020-4749-8bd6-98b637ce3f3c-config" (OuterVolumeSpecName: "config") pod "0c7188d0-4020-4749-8bd6-98b637ce3f3c" (UID: "0c7188d0-4020-4749-8bd6-98b637ce3f3c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.807275 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c7188d0-4020-4749-8bd6-98b637ce3f3c-kube-api-access-fx4jb" (OuterVolumeSpecName: "kube-api-access-fx4jb") pod "0c7188d0-4020-4749-8bd6-98b637ce3f3c" (UID: "0c7188d0-4020-4749-8bd6-98b637ce3f3c"). InnerVolumeSpecName "kube-api-access-fx4jb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.807408 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c7188d0-4020-4749-8bd6-98b637ce3f3c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0c7188d0-4020-4749-8bd6-98b637ce3f3c" (UID: "0c7188d0-4020-4749-8bd6-98b637ce3f3c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.907046 4742 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0c7188d0-4020-4749-8bd6-98b637ce3f3c-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.907104 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c7188d0-4020-4749-8bd6-98b637ce3f3c-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.907118 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fx4jb\" (UniqueName: \"kubernetes.io/projected/0c7188d0-4020-4749-8bd6-98b637ce3f3c-kube-api-access-fx4jb\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:17 crc kubenswrapper[4742]: I1205 05:57:17.907134 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c7188d0-4020-4749-8bd6-98b637ce3f3c-config\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.099131 4742 generic.go:334] "Generic (PLEG): container finished" podID="96fc5c33-e057-48e7-9e20-3b8860f09a1f" containerID="e6aa51d2f2d193189f393d63706636b96427c7e1b3e35f7fe9471db064782f22" exitCode=0
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.099216 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t26cm" event={"ID":"96fc5c33-e057-48e7-9e20-3b8860f09a1f","Type":"ContainerDied","Data":"e6aa51d2f2d193189f393d63706636b96427c7e1b3e35f7fe9471db064782f22"}
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.106584 4742 generic.go:334] "Generic (PLEG): container finished" podID="e4265f02-a9b1-4e0d-b568-e928700ff3f6" containerID="d45f30fdc0831f2a8f4295ec00e5993a9411c1b629833f3db58cd5554b46ebbc" exitCode=0
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.106755 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dms6l" event={"ID":"e4265f02-a9b1-4e0d-b568-e928700ff3f6","Type":"ContainerDied","Data":"d45f30fdc0831f2a8f4295ec00e5993a9411c1b629833f3db58cd5554b46ebbc"}
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.111306 4742 generic.go:334] "Generic (PLEG): container finished" podID="0c7188d0-4020-4749-8bd6-98b637ce3f3c" containerID="abe36ba16e7322e6e35e3ab2d4c8d5522eb63369ba15fd346b473da48bf29f06" exitCode=0
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.111432 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.111404 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" event={"ID":"0c7188d0-4020-4749-8bd6-98b637ce3f3c","Type":"ContainerDied","Data":"abe36ba16e7322e6e35e3ab2d4c8d5522eb63369ba15fd346b473da48bf29f06"}
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.111584 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c" event={"ID":"0c7188d0-4020-4749-8bd6-98b637ce3f3c","Type":"ContainerDied","Data":"6df29b27f7728e446b6b15cbc3ab227870683efa6441186dce4c1617a0cfb9eb"}
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.111609 4742 scope.go:117] "RemoveContainer" containerID="abe36ba16e7322e6e35e3ab2d4c8d5522eb63369ba15fd346b473da48bf29f06"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.117621 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.117766 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" event={"ID":"a302e9ed-44a5-41e8-8e91-c37771dca329","Type":"ContainerDied","Data":"e2b2237464488a3a85672795a12d4413a69b6eee6202ee5db9aa722ebeba7bf0"}
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.117768 4742 generic.go:334] "Generic (PLEG): container finished" podID="a302e9ed-44a5-41e8-8e91-c37771dca329" containerID="e2b2237464488a3a85672795a12d4413a69b6eee6202ee5db9aa722ebeba7bf0" exitCode=0
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.117959 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-vfstf" event={"ID":"a302e9ed-44a5-41e8-8e91-c37771dca329","Type":"ContainerDied","Data":"04c62f9005eb5c892bf941bb9b5a4863a1f2ada64fbe8cc3e2cac8758d9c2aa5"}
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.163921 4742 scope.go:117] "RemoveContainer" containerID="abe36ba16e7322e6e35e3ab2d4c8d5522eb63369ba15fd346b473da48bf29f06"
Dec 05 05:57:18 crc kubenswrapper[4742]: E1205 05:57:18.164438 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abe36ba16e7322e6e35e3ab2d4c8d5522eb63369ba15fd346b473da48bf29f06\": container with ID starting with abe36ba16e7322e6e35e3ab2d4c8d5522eb63369ba15fd346b473da48bf29f06 not found: ID does not exist" containerID="abe36ba16e7322e6e35e3ab2d4c8d5522eb63369ba15fd346b473da48bf29f06"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.164473 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abe36ba16e7322e6e35e3ab2d4c8d5522eb63369ba15fd346b473da48bf29f06"} err="failed to get container status \"abe36ba16e7322e6e35e3ab2d4c8d5522eb63369ba15fd346b473da48bf29f06\": rpc error: code = NotFound desc = could not find container \"abe36ba16e7322e6e35e3ab2d4c8d5522eb63369ba15fd346b473da48bf29f06\": container with ID starting with abe36ba16e7322e6e35e3ab2d4c8d5522eb63369ba15fd346b473da48bf29f06 not found: ID does not exist"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.164496 4742 scope.go:117] "RemoveContainer" containerID="e2b2237464488a3a85672795a12d4413a69b6eee6202ee5db9aa722ebeba7bf0"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.175751 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vfstf"]
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.180357 4742 scope.go:117] "RemoveContainer" containerID="e2b2237464488a3a85672795a12d4413a69b6eee6202ee5db9aa722ebeba7bf0"
Dec 05 05:57:18 crc kubenswrapper[4742]: E1205 05:57:18.180796 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2b2237464488a3a85672795a12d4413a69b6eee6202ee5db9aa722ebeba7bf0\": container with ID starting with e2b2237464488a3a85672795a12d4413a69b6eee6202ee5db9aa722ebeba7bf0 not found: ID does not exist" containerID="e2b2237464488a3a85672795a12d4413a69b6eee6202ee5db9aa722ebeba7bf0"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.180845 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2b2237464488a3a85672795a12d4413a69b6eee6202ee5db9aa722ebeba7bf0"} err="failed to get container status \"e2b2237464488a3a85672795a12d4413a69b6eee6202ee5db9aa722ebeba7bf0\": rpc error: code = NotFound desc = could not find container \"e2b2237464488a3a85672795a12d4413a69b6eee6202ee5db9aa722ebeba7bf0\": container with ID starting with e2b2237464488a3a85672795a12d4413a69b6eee6202ee5db9aa722ebeba7bf0 not found: ID does not exist"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.182228 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-vfstf"]
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.194132 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c"]
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.200133 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-2279c"]
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.395829 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c7188d0-4020-4749-8bd6-98b637ce3f3c" path="/var/lib/kubelet/pods/0c7188d0-4020-4749-8bd6-98b637ce3f3c/volumes"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.397501 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a302e9ed-44a5-41e8-8e91-c37771dca329" path="/var/lib/kubelet/pods/a302e9ed-44a5-41e8-8e91-c37771dca329/volumes"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.785289 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nk79d"]
Dec 05 05:57:18 crc kubenswrapper[4742]: E1205 05:57:18.785798 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a302e9ed-44a5-41e8-8e91-c37771dca329" containerName="controller-manager"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.785809 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a302e9ed-44a5-41e8-8e91-c37771dca329" containerName="controller-manager"
Dec 05 05:57:18 crc kubenswrapper[4742]: E1205 05:57:18.785821 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c7188d0-4020-4749-8bd6-98b637ce3f3c" containerName="route-controller-manager"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.785827 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c7188d0-4020-4749-8bd6-98b637ce3f3c" containerName="route-controller-manager"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.785915 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a302e9ed-44a5-41e8-8e91-c37771dca329" containerName="controller-manager"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.785925 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c7188d0-4020-4749-8bd6-98b637ce3f3c" containerName="route-controller-manager"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.786578 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nk79d"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.788557 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.800050 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nk79d"]
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.918734 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzfql\" (UniqueName: \"kubernetes.io/projected/97e4e804-858b-4992-9bce-31ede1359c3e-kube-api-access-rzfql\") pod \"redhat-marketplace-nk79d\" (UID: \"97e4e804-858b-4992-9bce-31ede1359c3e\") " pod="openshift-marketplace/redhat-marketplace-nk79d"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.918824 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97e4e804-858b-4992-9bce-31ede1359c3e-catalog-content\") pod \"redhat-marketplace-nk79d\" (UID: \"97e4e804-858b-4992-9bce-31ede1359c3e\") " pod="openshift-marketplace/redhat-marketplace-nk79d"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.918913 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97e4e804-858b-4992-9bce-31ede1359c3e-utilities\") pod \"redhat-marketplace-nk79d\" (UID: \"97e4e804-858b-4992-9bce-31ede1359c3e\") " pod="openshift-marketplace/redhat-marketplace-nk79d"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.989538 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wpdk5"]
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.990588 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wpdk5"
Dec 05 05:57:18 crc kubenswrapper[4742]: I1205 05:57:18.995699 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.020410 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzfql\" (UniqueName: \"kubernetes.io/projected/97e4e804-858b-4992-9bce-31ede1359c3e-kube-api-access-rzfql\") pod \"redhat-marketplace-nk79d\" (UID: \"97e4e804-858b-4992-9bce-31ede1359c3e\") " pod="openshift-marketplace/redhat-marketplace-nk79d"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.020559 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97e4e804-858b-4992-9bce-31ede1359c3e-catalog-content\") pod \"redhat-marketplace-nk79d\" (UID: \"97e4e804-858b-4992-9bce-31ede1359c3e\") " pod="openshift-marketplace/redhat-marketplace-nk79d"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.020608 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97e4e804-858b-4992-9bce-31ede1359c3e-utilities\") pod \"redhat-marketplace-nk79d\" (UID: \"97e4e804-858b-4992-9bce-31ede1359c3e\") " pod="openshift-marketplace/redhat-marketplace-nk79d"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.021188 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97e4e804-858b-4992-9bce-31ede1359c3e-catalog-content\") pod \"redhat-marketplace-nk79d\" (UID: \"97e4e804-858b-4992-9bce-31ede1359c3e\") " pod="openshift-marketplace/redhat-marketplace-nk79d"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.021251 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97e4e804-858b-4992-9bce-31ede1359c3e-utilities\") pod \"redhat-marketplace-nk79d\" (UID: \"97e4e804-858b-4992-9bce-31ede1359c3e\") " pod="openshift-marketplace/redhat-marketplace-nk79d"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.042884 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzfql\" (UniqueName: \"kubernetes.io/projected/97e4e804-858b-4992-9bce-31ede1359c3e-kube-api-access-rzfql\") pod \"redhat-marketplace-nk79d\" (UID: \"97e4e804-858b-4992-9bce-31ede1359c3e\") " pod="openshift-marketplace/redhat-marketplace-nk79d"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.047505 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wpdk5"]
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.121518 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gck6l\" (UniqueName: \"kubernetes.io/projected/c54462f3-33a2-4bf4-9601-5a321e633702-kube-api-access-gck6l\") pod \"redhat-operators-wpdk5\" (UID: \"c54462f3-33a2-4bf4-9601-5a321e633702\") " pod="openshift-marketplace/redhat-operators-wpdk5"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.121579 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c54462f3-33a2-4bf4-9601-5a321e633702-utilities\") pod \"redhat-operators-wpdk5\" (UID: \"c54462f3-33a2-4bf4-9601-5a321e633702\") " pod="openshift-marketplace/redhat-operators-wpdk5"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.121623 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c54462f3-33a2-4bf4-9601-5a321e633702-catalog-content\") pod \"redhat-operators-wpdk5\" (UID: \"c54462f3-33a2-4bf4-9601-5a321e633702\") " pod="openshift-marketplace/redhat-operators-wpdk5"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.122997 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nk79d"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.125930 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t26cm" event={"ID":"96fc5c33-e057-48e7-9e20-3b8860f09a1f","Type":"ContainerStarted","Data":"b72c9f30bdd5f040f43da5a3210736f141ce80a9133c179afe7558c7806b8a89"}
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.128636 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dms6l" event={"ID":"e4265f02-a9b1-4e0d-b568-e928700ff3f6","Type":"ContainerStarted","Data":"580f39c59a7f922aa43134ccab3b346649b900a52a7973b2d5b0f996c9806e11"}
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.164480 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t26cm" podStartSLOduration=1.760947982 podStartE2EDuration="3.164457719s" podCreationTimestamp="2025-12-05 05:57:16 +0000 UTC" firstStartedPulling="2025-12-05 05:57:17.090660867 +0000 UTC m=+313.002795929" lastFinishedPulling="2025-12-05 05:57:18.494170604 +0000 UTC m=+314.406305666" observedRunningTime="2025-12-05 05:57:19.146494012 +0000 UTC m=+315.058629074" watchObservedRunningTime="2025-12-05 05:57:19.164457719 +0000 UTC m=+315.076592791"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.169492 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"]
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.170201 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.174133 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.174468 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.174601 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.174705 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.174801 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.175166 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.177637 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"]
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.178171 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.183908 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dms6l" podStartSLOduration=1.5742636829999999 podStartE2EDuration="3.183886647s" podCreationTimestamp="2025-12-05 05:57:16 +0000 UTC" firstStartedPulling="2025-12-05 05:57:17.090968995 +0000 UTC m=+313.003104057" lastFinishedPulling="2025-12-05 05:57:18.700591949 +0000 UTC m=+314.612727021" observedRunningTime="2025-12-05 05:57:19.163699447 +0000 UTC m=+315.075834539" watchObservedRunningTime="2025-12-05 05:57:19.183886647 +0000 UTC m=+315.096021729"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.187413 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.187652 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.187962 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.188614 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.189409 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.189858 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.195735 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"]
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.205240 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"]
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.216547 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.222594 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-serving-cert\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.222745 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c54462f3-33a2-4bf4-9601-5a321e633702-catalog-content\") pod \"redhat-operators-wpdk5\" (UID: \"c54462f3-33a2-4bf4-9601-5a321e633702\") " pod="openshift-marketplace/redhat-operators-wpdk5"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.222849 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-config\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.222920 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/664a4161-268b-4662-b800-448084f2dd27-serving-cert\") pod \"route-controller-manager-b58b46d55-bgv6l\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") " pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.223012 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gck6l\" (UniqueName: \"kubernetes.io/projected/c54462f3-33a2-4bf4-9601-5a321e633702-kube-api-access-gck6l\") pod \"redhat-operators-wpdk5\" (UID: \"c54462f3-33a2-4bf4-9601-5a321e633702\") " pod="openshift-marketplace/redhat-operators-wpdk5"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.223216 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/664a4161-268b-4662-b800-448084f2dd27-config\") pod \"route-controller-manager-b58b46d55-bgv6l\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") " pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.223422 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-client-ca\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.223676 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6rtn\" (UniqueName: \"kubernetes.io/projected/664a4161-268b-4662-b800-448084f2dd27-kube-api-access-v6rtn\") pod \"route-controller-manager-b58b46d55-bgv6l\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") " pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.223687 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c54462f3-33a2-4bf4-9601-5a321e633702-catalog-content\") pod \"redhat-operators-wpdk5\" (UID: \"c54462f3-33a2-4bf4-9601-5a321e633702\") " pod="openshift-marketplace/redhat-operators-wpdk5"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.223808 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c54462f3-33a2-4bf4-9601-5a321e633702-utilities\") pod \"redhat-operators-wpdk5\" (UID: \"c54462f3-33a2-4bf4-9601-5a321e633702\") " pod="openshift-marketplace/redhat-operators-wpdk5"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.224047 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k74n2\" (UniqueName: \"kubernetes.io/projected/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-kube-api-access-k74n2\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.224111 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-proxy-ca-bundles\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.224142 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/664a4161-268b-4662-b800-448084f2dd27-client-ca\") pod \"route-controller-manager-b58b46d55-bgv6l\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") " pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.230986 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c54462f3-33a2-4bf4-9601-5a321e633702-utilities\") pod \"redhat-operators-wpdk5\" (UID: \"c54462f3-33a2-4bf4-9601-5a321e633702\") " pod="openshift-marketplace/redhat-operators-wpdk5"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.239729 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gck6l\" (UniqueName: \"kubernetes.io/projected/c54462f3-33a2-4bf4-9601-5a321e633702-kube-api-access-gck6l\") pod \"redhat-operators-wpdk5\" (UID: \"c54462f3-33a2-4bf4-9601-5a321e633702\") " pod="openshift-marketplace/redhat-operators-wpdk5"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.311529 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wpdk5"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.324776 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-config\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.324816 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/664a4161-268b-4662-b800-448084f2dd27-serving-cert\") pod \"route-controller-manager-b58b46d55-bgv6l\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") " pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.324838 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/664a4161-268b-4662-b800-448084f2dd27-config\") pod \"route-controller-manager-b58b46d55-bgv6l\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") " pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.324863 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-client-ca\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.324881 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6rtn\" (UniqueName: \"kubernetes.io/projected/664a4161-268b-4662-b800-448084f2dd27-kube-api-access-v6rtn\") pod \"route-controller-manager-b58b46d55-bgv6l\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") " pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.324912 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k74n2\" (UniqueName: \"kubernetes.io/projected/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-kube-api-access-k74n2\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.324932 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-proxy-ca-bundles\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.324957 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/664a4161-268b-4662-b800-448084f2dd27-client-ca\") pod \"route-controller-manager-b58b46d55-bgv6l\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") " pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.324980 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-serving-cert\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.325925 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-client-ca\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.326951 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-proxy-ca-bundles\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.327585 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-config\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.329681 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/664a4161-268b-4662-b800-448084f2dd27-client-ca\") pod \"route-controller-manager-b58b46d55-bgv6l\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") " pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.330244 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-serving-cert\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.332343 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/664a4161-268b-4662-b800-448084f2dd27-serving-cert\") pod \"route-controller-manager-b58b46d55-bgv6l\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") " pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.341452 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/664a4161-268b-4662-b800-448084f2dd27-config\") pod \"route-controller-manager-b58b46d55-bgv6l\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") " pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.349241 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6rtn\" (UniqueName: \"kubernetes.io/projected/664a4161-268b-4662-b800-448084f2dd27-kube-api-access-v6rtn\") pod \"route-controller-manager-b58b46d55-bgv6l\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") " pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.352461 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k74n2\" (UniqueName: \"kubernetes.io/projected/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-kube-api-access-k74n2\") pod \"controller-manager-6dc9b44d9-chhvf\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") " pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.394623 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nk79d"]
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.497203 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.508465 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.640236 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"]
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.646848 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"]
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.723276 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wpdk5"]
Dec 05 05:57:19 crc kubenswrapper[4742]: W1205 05:57:19.733365 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc54462f3_33a2_4bf4_9601_5a321e633702.slice/crio-847409a8e1c6a16f8fc1a51e12a627f90fbb0d57522b89adce0d77b00da3a5ae WatchSource:0}: Error finding container 847409a8e1c6a16f8fc1a51e12a627f90fbb0d57522b89adce0d77b00da3a5ae: Status 404 returned error can't find the container with id 847409a8e1c6a16f8fc1a51e12a627f90fbb0d57522b89adce0d77b00da3a5ae
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.768323 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"]
Dec 05 05:57:19 crc kubenswrapper[4742]: W1205 05:57:19.783416 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod863e45b3_6a3b_4bf8_990e_ed3191f23c3f.slice/crio-fd038118556e46d9f0ab88b8a3adeb0e90c981e669b5c7e688c3ffbb83fc1606 WatchSource:0}: Error finding container fd038118556e46d9f0ab88b8a3adeb0e90c981e669b5c7e688c3ffbb83fc1606: Status 404 returned error can't find the container with id fd038118556e46d9f0ab88b8a3adeb0e90c981e669b5c7e688c3ffbb83fc1606
Dec 05 05:57:19 crc kubenswrapper[4742]: I1205 05:57:19.831791 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"]
Dec 05 05:57:19 crc kubenswrapper[4742]: W1205 05:57:19.848254 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod664a4161_268b_4662_b800_448084f2dd27.slice/crio-9ff445321c3db15a703ee2ff855a85c306b0e147de34a1a09affdb8252ede804 WatchSource:0}: Error finding container 9ff445321c3db15a703ee2ff855a85c306b0e147de34a1a09affdb8252ede804: Status 404 returned error can't find the container with id 9ff445321c3db15a703ee2ff855a85c306b0e147de34a1a09affdb8252ede804
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.137390 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l" event={"ID":"664a4161-268b-4662-b800-448084f2dd27","Type":"ContainerStarted","Data":"1ef8860a8b7c78987c6ea38c10f24f17d91216b7953ce2c577dd2f99c8f9b7ed"}
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.137663 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.137674 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l" event={"ID":"664a4161-268b-4662-b800-448084f2dd27","Type":"ContainerStarted","Data":"9ff445321c3db15a703ee2ff855a85c306b0e147de34a1a09affdb8252ede804"}
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.137524 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l" podUID="664a4161-268b-4662-b800-448084f2dd27" containerName="route-controller-manager" containerID="cri-o://1ef8860a8b7c78987c6ea38c10f24f17d91216b7953ce2c577dd2f99c8f9b7ed" gracePeriod=30
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.140941 4742 generic.go:334] "Generic (PLEG): container finished" podID="c54462f3-33a2-4bf4-9601-5a321e633702" containerID="d1ab18130d8c006dd3903b26a7d69f3415e8862ba5a07ac6563da4b3b495c871" exitCode=0
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.141044 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wpdk5" event={"ID":"c54462f3-33a2-4bf4-9601-5a321e633702","Type":"ContainerDied","Data":"d1ab18130d8c006dd3903b26a7d69f3415e8862ba5a07ac6563da4b3b495c871"}
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.141111 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wpdk5" event={"ID":"c54462f3-33a2-4bf4-9601-5a321e633702","Type":"ContainerStarted","Data":"847409a8e1c6a16f8fc1a51e12a627f90fbb0d57522b89adce0d77b00da3a5ae"}
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.146529 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf" event={"ID":"863e45b3-6a3b-4bf8-990e-ed3191f23c3f","Type":"ContainerStarted","Data":"ef0beee6f8177bec906a1de90568326df25c5a2f0150db01424b0f63bfc6f703"}
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.146628 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf" event={"ID":"863e45b3-6a3b-4bf8-990e-ed3191f23c3f","Type":"ContainerStarted","Data":"fd038118556e46d9f0ab88b8a3adeb0e90c981e669b5c7e688c3ffbb83fc1606"}
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.146844 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf" podUID="863e45b3-6a3b-4bf8-990e-ed3191f23c3f" containerName="controller-manager" containerID="cri-o://ef0beee6f8177bec906a1de90568326df25c5a2f0150db01424b0f63bfc6f703" gracePeriod=30
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.147298 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.162422 4742 generic.go:334] "Generic (PLEG): container finished" podID="97e4e804-858b-4992-9bce-31ede1359c3e" containerID="500d4bc92b0b786faeae46b70509a923a718b4e18bce456a3384a5f9f7738ab2" exitCode=0
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.162536 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk79d" event={"ID":"97e4e804-858b-4992-9bce-31ede1359c3e","Type":"ContainerDied","Data":"500d4bc92b0b786faeae46b70509a923a718b4e18bce456a3384a5f9f7738ab2"}
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.162606 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk79d" event={"ID":"97e4e804-858b-4992-9bce-31ede1359c3e","Type":"ContainerStarted","Data":"f3cc93cf22af1b8f5579b88deb94d12ac0a1de2a481d01479f22573d58545e4f"}
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.178173 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l" podStartSLOduration=3.178151665 podStartE2EDuration="3.178151665s" podCreationTimestamp="2025-12-05 05:57:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:57:20.176727705 +0000 UTC m=+316.088862787" watchObservedRunningTime="2025-12-05 05:57:20.178151665 +0000 UTC m=+316.090286757"
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.185215 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.291160 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf" podStartSLOduration=3.291038521 podStartE2EDuration="3.291038521s" podCreationTimestamp="2025-12-05 05:57:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:57:20.280337949 +0000 UTC m=+316.192473011" watchObservedRunningTime="2025-12-05 05:57:20.291038521 +0000 UTC m=+316.203173593"
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.767782 4742 patch_prober.go:28] interesting pod/route-controller-manager-b58b46d55-bgv6l container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.63:8443/healthz\": read tcp 10.217.0.2:48706->10.217.0.63:8443: read: connection reset by peer" start-of-body=
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.767839 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l" podUID="664a4161-268b-4662-b800-448084f2dd27" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.63:8443/healthz\": read tcp 10.217.0.2:48706->10.217.0.63:8443: read: connection reset by peer"
Dec 05 05:57:20 crc kubenswrapper[4742]: I1205 05:57:20.994611 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.046285 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-64c6947768-f5wpf"]
Dec 05 05:57:21 crc kubenswrapper[4742]: E1205 05:57:21.046534 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="863e45b3-6a3b-4bf8-990e-ed3191f23c3f" containerName="controller-manager"
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.046547 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="863e45b3-6a3b-4bf8-990e-ed3191f23c3f" containerName="controller-manager"
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.046640 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="863e45b3-6a3b-4bf8-990e-ed3191f23c3f" containerName="controller-manager"
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.047230 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-client-ca\") pod \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") "
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.047262 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-serving-cert\") pod \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") "
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.047318 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-proxy-ca-bundles\") pod \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") "
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.047372 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k74n2\" (UniqueName: \"kubernetes.io/projected/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-kube-api-access-k74n2\") pod \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") "
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.047391 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-config\") pod \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\" (UID: \"863e45b3-6a3b-4bf8-990e-ed3191f23c3f\") "
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.048649 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-config" (OuterVolumeSpecName: "config") pod "863e45b3-6a3b-4bf8-990e-ed3191f23c3f" (UID: "863e45b3-6a3b-4bf8-990e-ed3191f23c3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.048730 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf"
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.050318 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "863e45b3-6a3b-4bf8-990e-ed3191f23c3f" (UID: "863e45b3-6a3b-4bf8-990e-ed3191f23c3f"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.050592 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-client-ca" (OuterVolumeSpecName: "client-ca") pod "863e45b3-6a3b-4bf8-990e-ed3191f23c3f" (UID: "863e45b3-6a3b-4bf8-990e-ed3191f23c3f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.054368 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-64c6947768-f5wpf"]
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.055278 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "863e45b3-6a3b-4bf8-990e-ed3191f23c3f" (UID: "863e45b3-6a3b-4bf8-990e-ed3191f23c3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.057946 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-kube-api-access-k74n2" (OuterVolumeSpecName: "kube-api-access-k74n2") pod "863e45b3-6a3b-4bf8-990e-ed3191f23c3f" (UID: "863e45b3-6a3b-4bf8-990e-ed3191f23c3f"). InnerVolumeSpecName "kube-api-access-k74n2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.100825 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-b58b46d55-bgv6l_664a4161-268b-4662-b800-448084f2dd27/route-controller-manager/0.log"
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.100887 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.148972 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6rtn\" (UniqueName: \"kubernetes.io/projected/664a4161-268b-4662-b800-448084f2dd27-kube-api-access-v6rtn\") pod \"664a4161-268b-4662-b800-448084f2dd27\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") "
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.149048 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/664a4161-268b-4662-b800-448084f2dd27-serving-cert\") pod \"664a4161-268b-4662-b800-448084f2dd27\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") "
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.149103 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/664a4161-268b-4662-b800-448084f2dd27-client-ca\") pod \"664a4161-268b-4662-b800-448084f2dd27\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") "
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.149150 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/664a4161-268b-4662-b800-448084f2dd27-config\") pod \"664a4161-268b-4662-b800-448084f2dd27\" (UID: \"664a4161-268b-4662-b800-448084f2dd27\") "
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.149315 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a35894a-dbdb-49aa-974d-de3dcc6577c5-config\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf"
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.149365 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a35894a-dbdb-49aa-974d-de3dcc6577c5-client-ca\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf"
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.149385 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a35894a-dbdb-49aa-974d-de3dcc6577c5-serving-cert\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf"
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.149401 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8h9w\" (UniqueName: \"kubernetes.io/projected/1a35894a-dbdb-49aa-974d-de3dcc6577c5-kube-api-access-m8h9w\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf"
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.149422 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1a35894a-dbdb-49aa-974d-de3dcc6577c5-proxy-ca-bundles\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf"
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.149497 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-config\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.149510 4742 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.149519 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.149531 4742 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.149541 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k74n2\" (UniqueName: \"kubernetes.io/projected/863e45b3-6a3b-4bf8-990e-ed3191f23c3f-kube-api-access-k74n2\") on node \"crc\" DevicePath \"\""
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.151533 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/664a4161-268b-4662-b800-448084f2dd27-client-ca" (OuterVolumeSpecName: "client-ca") pod "664a4161-268b-4662-b800-448084f2dd27" (UID: "664a4161-268b-4662-b800-448084f2dd27"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.151568 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/664a4161-268b-4662-b800-448084f2dd27-config" (OuterVolumeSpecName: "config") pod "664a4161-268b-4662-b800-448084f2dd27" (UID: "664a4161-268b-4662-b800-448084f2dd27"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.152206 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/664a4161-268b-4662-b800-448084f2dd27-kube-api-access-v6rtn" (OuterVolumeSpecName: "kube-api-access-v6rtn") pod "664a4161-268b-4662-b800-448084f2dd27" (UID: "664a4161-268b-4662-b800-448084f2dd27"). InnerVolumeSpecName "kube-api-access-v6rtn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.152411 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/664a4161-268b-4662-b800-448084f2dd27-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "664a4161-268b-4662-b800-448084f2dd27" (UID: "664a4161-268b-4662-b800-448084f2dd27"). InnerVolumeSpecName "serving-cert".
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.167782 4742 generic.go:334] "Generic (PLEG): container finished" podID="863e45b3-6a3b-4bf8-990e-ed3191f23c3f" containerID="ef0beee6f8177bec906a1de90568326df25c5a2f0150db01424b0f63bfc6f703" exitCode=0 Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.167858 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf" event={"ID":"863e45b3-6a3b-4bf8-990e-ed3191f23c3f","Type":"ContainerDied","Data":"ef0beee6f8177bec906a1de90568326df25c5a2f0150db01424b0f63bfc6f703"} Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.168147 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf" event={"ID":"863e45b3-6a3b-4bf8-990e-ed3191f23c3f","Type":"ContainerDied","Data":"fd038118556e46d9f0ab88b8a3adeb0e90c981e669b5c7e688c3ffbb83fc1606"} Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.168169 4742 scope.go:117] "RemoveContainer" containerID="ef0beee6f8177bec906a1de90568326df25c5a2f0150db01424b0f63bfc6f703" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.167877 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6dc9b44d9-chhvf" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.173943 4742 generic.go:334] "Generic (PLEG): container finished" podID="97e4e804-858b-4992-9bce-31ede1359c3e" containerID="5dc3a87556fd08abd23121fe5b9970d6a3aa9102ecdb163db30d4ce637d307f6" exitCode=0 Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.174239 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk79d" event={"ID":"97e4e804-858b-4992-9bce-31ede1359c3e","Type":"ContainerDied","Data":"5dc3a87556fd08abd23121fe5b9970d6a3aa9102ecdb163db30d4ce637d307f6"} Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.176850 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-b58b46d55-bgv6l_664a4161-268b-4662-b800-448084f2dd27/route-controller-manager/0.log" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.176901 4742 generic.go:334] "Generic (PLEG): container finished" podID="664a4161-268b-4662-b800-448084f2dd27" containerID="1ef8860a8b7c78987c6ea38c10f24f17d91216b7953ce2c577dd2f99c8f9b7ed" exitCode=255 Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.177014 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.177010 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l" event={"ID":"664a4161-268b-4662-b800-448084f2dd27","Type":"ContainerDied","Data":"1ef8860a8b7c78987c6ea38c10f24f17d91216b7953ce2c577dd2f99c8f9b7ed"} Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.177073 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l" event={"ID":"664a4161-268b-4662-b800-448084f2dd27","Type":"ContainerDied","Data":"9ff445321c3db15a703ee2ff855a85c306b0e147de34a1a09affdb8252ede804"} Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.183724 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wpdk5" event={"ID":"c54462f3-33a2-4bf4-9601-5a321e633702","Type":"ContainerStarted","Data":"d528774bfa405fc69d9f447957edd5f99d5f78810c2c58cd38b21365c86a36c1"} Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.203002 4742 scope.go:117] "RemoveContainer" containerID="ef0beee6f8177bec906a1de90568326df25c5a2f0150db01424b0f63bfc6f703" Dec 05 05:57:21 crc kubenswrapper[4742]: E1205 05:57:21.203495 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef0beee6f8177bec906a1de90568326df25c5a2f0150db01424b0f63bfc6f703\": container with ID starting with ef0beee6f8177bec906a1de90568326df25c5a2f0150db01424b0f63bfc6f703 not found: ID does not exist" containerID="ef0beee6f8177bec906a1de90568326df25c5a2f0150db01424b0f63bfc6f703" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.203532 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef0beee6f8177bec906a1de90568326df25c5a2f0150db01424b0f63bfc6f703"} err="failed to get container status \"ef0beee6f8177bec906a1de90568326df25c5a2f0150db01424b0f63bfc6f703\": rpc error: code = NotFound desc = could not find container \"ef0beee6f8177bec906a1de90568326df25c5a2f0150db01424b0f63bfc6f703\": container with ID starting with ef0beee6f8177bec906a1de90568326df25c5a2f0150db01424b0f63bfc6f703 not found: ID does not exist" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.203557 4742 scope.go:117] "RemoveContainer" containerID="1ef8860a8b7c78987c6ea38c10f24f17d91216b7953ce2c577dd2f99c8f9b7ed" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.216748 4742 scope.go:117] "RemoveContainer" containerID="1ef8860a8b7c78987c6ea38c10f24f17d91216b7953ce2c577dd2f99c8f9b7ed" Dec 05 05:57:21 crc kubenswrapper[4742]: E1205 05:57:21.217046 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ef8860a8b7c78987c6ea38c10f24f17d91216b7953ce2c577dd2f99c8f9b7ed\": container with ID starting with 1ef8860a8b7c78987c6ea38c10f24f17d91216b7953ce2c577dd2f99c8f9b7ed not found: ID does not exist" containerID="1ef8860a8b7c78987c6ea38c10f24f17d91216b7953ce2c577dd2f99c8f9b7ed" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.217092 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ef8860a8b7c78987c6ea38c10f24f17d91216b7953ce2c577dd2f99c8f9b7ed"} err="failed to get container status \"1ef8860a8b7c78987c6ea38c10f24f17d91216b7953ce2c577dd2f99c8f9b7ed\": rpc error: code = NotFound desc 
= could not find container \"1ef8860a8b7c78987c6ea38c10f24f17d91216b7953ce2c577dd2f99c8f9b7ed\": container with ID starting with 1ef8860a8b7c78987c6ea38c10f24f17d91216b7953ce2c577dd2f99c8f9b7ed not found: ID does not exist" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.239818 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"] Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.243186 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6dc9b44d9-chhvf"] Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.249974 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"] Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.250728 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a35894a-dbdb-49aa-974d-de3dcc6577c5-config\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.250789 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a35894a-dbdb-49aa-974d-de3dcc6577c5-client-ca\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.250814 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a35894a-dbdb-49aa-974d-de3dcc6577c5-serving-cert\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.250832 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8h9w\" (UniqueName: \"kubernetes.io/projected/1a35894a-dbdb-49aa-974d-de3dcc6577c5-kube-api-access-m8h9w\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.250853 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1a35894a-dbdb-49aa-974d-de3dcc6577c5-proxy-ca-bundles\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.250888 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6rtn\" (UniqueName: \"kubernetes.io/projected/664a4161-268b-4662-b800-448084f2dd27-kube-api-access-v6rtn\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.250898 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/664a4161-268b-4662-b800-448084f2dd27-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.250907 4742 reconciler_common.go:293] "Volume detached 
for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/664a4161-268b-4662-b800-448084f2dd27-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.250916 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/664a4161-268b-4662-b800-448084f2dd27-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.251855 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1a35894a-dbdb-49aa-974d-de3dcc6577c5-client-ca\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.251898 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/1a35894a-dbdb-49aa-974d-de3dcc6577c5-proxy-ca-bundles\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.252588 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a35894a-dbdb-49aa-974d-de3dcc6577c5-config\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.252697 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b58b46d55-bgv6l"] Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.255566 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a35894a-dbdb-49aa-974d-de3dcc6577c5-serving-cert\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.267532 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8h9w\" (UniqueName: \"kubernetes.io/projected/1a35894a-dbdb-49aa-974d-de3dcc6577c5-kube-api-access-m8h9w\") pod \"controller-manager-64c6947768-f5wpf\" (UID: \"1a35894a-dbdb-49aa-974d-de3dcc6577c5\") " pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.392111 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" Dec 05 05:57:21 crc kubenswrapper[4742]: I1205 05:57:21.622026 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-64c6947768-f5wpf"] Dec 05 05:57:21 crc kubenswrapper[4742]: W1205 05:57:21.684160 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1a35894a_dbdb_49aa_974d_de3dcc6577c5.slice/crio-b7e6752ffd8cdd1e5ee4bba3a15b27d62d148df389c40f5a2675468abe3f7e38 WatchSource:0}: Error finding container b7e6752ffd8cdd1e5ee4bba3a15b27d62d148df389c40f5a2675468abe3f7e38: Status 404 returned error can't find the container with id b7e6752ffd8cdd1e5ee4bba3a15b27d62d148df389c40f5a2675468abe3f7e38 Dec 05 05:57:22 crc kubenswrapper[4742]: I1205 05:57:22.191681 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nk79d" event={"ID":"97e4e804-858b-4992-9bce-31ede1359c3e","Type":"ContainerStarted","Data":"91e09d4cfcd9f458dcd20c2c78589a0b919760331261f893c3f4a81f45100712"} Dec 05 05:57:22 crc kubenswrapper[4742]: I1205 05:57:22.193245 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" event={"ID":"1a35894a-dbdb-49aa-974d-de3dcc6577c5","Type":"ContainerStarted","Data":"feae5b36a462ea69ef2deefabbb54d42a62a82fbb8ef28ed3c35e1bf0f13be81"} Dec 05 05:57:22 crc kubenswrapper[4742]: I1205 05:57:22.193274 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" event={"ID":"1a35894a-dbdb-49aa-974d-de3dcc6577c5","Type":"ContainerStarted","Data":"b7e6752ffd8cdd1e5ee4bba3a15b27d62d148df389c40f5a2675468abe3f7e38"} Dec 05 05:57:22 crc kubenswrapper[4742]: I1205 05:57:22.193413 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" Dec 05 05:57:22 crc kubenswrapper[4742]: I1205 05:57:22.195894 4742 generic.go:334] "Generic (PLEG): container finished" podID="c54462f3-33a2-4bf4-9601-5a321e633702" containerID="d528774bfa405fc69d9f447957edd5f99d5f78810c2c58cd38b21365c86a36c1" exitCode=0 Dec 05 05:57:22 crc kubenswrapper[4742]: I1205 05:57:22.195947 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wpdk5" event={"ID":"c54462f3-33a2-4bf4-9601-5a321e633702","Type":"ContainerDied","Data":"d528774bfa405fc69d9f447957edd5f99d5f78810c2c58cd38b21365c86a36c1"} Dec 05 05:57:22 crc kubenswrapper[4742]: I1205 05:57:22.201594 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" Dec 05 05:57:22 crc kubenswrapper[4742]: I1205 05:57:22.207992 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nk79d" podStartSLOduration=2.751610629 podStartE2EDuration="4.207979908s" podCreationTimestamp="2025-12-05 05:57:18 +0000 UTC" firstStartedPulling="2025-12-05 05:57:20.163857052 +0000 UTC m=+316.075992114" lastFinishedPulling="2025-12-05 05:57:21.620226331 +0000 UTC m=+317.532361393" observedRunningTime="2025-12-05 05:57:22.206071734 +0000 UTC m=+318.118206836" watchObservedRunningTime="2025-12-05 05:57:22.207979908 +0000 UTC m=+318.120114970" Dec 05 05:57:22 crc kubenswrapper[4742]: I1205 05:57:22.221348 4742 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-controller-manager/controller-manager-64c6947768-f5wpf" podStartSLOduration=3.221330195 podStartE2EDuration="3.221330195s" podCreationTimestamp="2025-12-05 05:57:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:57:22.220633345 +0000 UTC m=+318.132768427" watchObservedRunningTime="2025-12-05 05:57:22.221330195 +0000 UTC m=+318.133465267" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.167041 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs"] Dec 05 05:57:23 crc kubenswrapper[4742]: E1205 05:57:23.167777 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="664a4161-268b-4662-b800-448084f2dd27" containerName="route-controller-manager" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.167802 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="664a4161-268b-4662-b800-448084f2dd27" containerName="route-controller-manager" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.168010 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="664a4161-268b-4662-b800-448084f2dd27" containerName="route-controller-manager" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.168761 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.178507 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs"] Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.179367 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.179689 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.179753 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.179960 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.180208 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.180435 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.279865 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c78bz\" (UniqueName: \"kubernetes.io/projected/d5019b47-f2d8-439c-9f86-945c6af4c503-kube-api-access-c78bz\") pod \"route-controller-manager-6c4db7d847-ghwvs\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.279906 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/d5019b47-f2d8-439c-9f86-945c6af4c503-client-ca\") pod \"route-controller-manager-6c4db7d847-ghwvs\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.280005 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5019b47-f2d8-439c-9f86-945c6af4c503-config\") pod \"route-controller-manager-6c4db7d847-ghwvs\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.280030 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5019b47-f2d8-439c-9f86-945c6af4c503-serving-cert\") pod \"route-controller-manager-6c4db7d847-ghwvs\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.380873 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5019b47-f2d8-439c-9f86-945c6af4c503-config\") pod \"route-controller-manager-6c4db7d847-ghwvs\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.380921 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5019b47-f2d8-439c-9f86-945c6af4c503-serving-cert\") pod \"route-controller-manager-6c4db7d847-ghwvs\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.380960 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c78bz\" (UniqueName: \"kubernetes.io/projected/d5019b47-f2d8-439c-9f86-945c6af4c503-kube-api-access-c78bz\") pod \"route-controller-manager-6c4db7d847-ghwvs\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.381005 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d5019b47-f2d8-439c-9f86-945c6af4c503-client-ca\") pod \"route-controller-manager-6c4db7d847-ghwvs\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.382009 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d5019b47-f2d8-439c-9f86-945c6af4c503-client-ca\") pod \"route-controller-manager-6c4db7d847-ghwvs\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.382930 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5019b47-f2d8-439c-9f86-945c6af4c503-config\") pod 
\"route-controller-manager-6c4db7d847-ghwvs\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.390271 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5019b47-f2d8-439c-9f86-945c6af4c503-serving-cert\") pod \"route-controller-manager-6c4db7d847-ghwvs\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.415103 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c78bz\" (UniqueName: \"kubernetes.io/projected/d5019b47-f2d8-439c-9f86-945c6af4c503-kube-api-access-c78bz\") pod \"route-controller-manager-6c4db7d847-ghwvs\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.670547 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.694998 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="664a4161-268b-4662-b800-448084f2dd27" path="/var/lib/kubelet/pods/664a4161-268b-4662-b800-448084f2dd27/volumes" Dec 05 05:57:23 crc kubenswrapper[4742]: I1205 05:57:23.695725 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="863e45b3-6a3b-4bf8-990e-ed3191f23c3f" path="/var/lib/kubelet/pods/863e45b3-6a3b-4bf8-990e-ed3191f23c3f/volumes" Dec 05 05:57:24 crc kubenswrapper[4742]: I1205 05:57:24.110462 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs"] Dec 05 05:57:24 crc kubenswrapper[4742]: W1205 05:57:24.114318 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd5019b47_f2d8_439c_9f86_945c6af4c503.slice/crio-5bfdb90c8d862c4ab1350e7a85219fb8a0a435bc0873dbff91d266405ef8df2f WatchSource:0}: Error finding container 5bfdb90c8d862c4ab1350e7a85219fb8a0a435bc0873dbff91d266405ef8df2f: Status 404 returned error can't find the container with id 5bfdb90c8d862c4ab1350e7a85219fb8a0a435bc0873dbff91d266405ef8df2f Dec 05 05:57:24 crc kubenswrapper[4742]: I1205 05:57:24.399110 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs"] Dec 05 05:57:24 crc kubenswrapper[4742]: I1205 05:57:24.699274 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wpdk5" event={"ID":"c54462f3-33a2-4bf4-9601-5a321e633702","Type":"ContainerStarted","Data":"5ca4add7e57dd40df0db0cb40125527eb109bb608166b3ea3e2c3b046aa55fa5"} Dec 05 05:57:24 crc kubenswrapper[4742]: I1205 05:57:24.700703 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" event={"ID":"d5019b47-f2d8-439c-9f86-945c6af4c503","Type":"ContainerStarted","Data":"cda502223131d1e623124acf9d4965b09f714dbe60392db16f74d0d540f3ad48"} Dec 05 05:57:24 crc kubenswrapper[4742]: I1205 05:57:24.700874 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:24 crc kubenswrapper[4742]: I1205 05:57:24.700896 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" event={"ID":"d5019b47-f2d8-439c-9f86-945c6af4c503","Type":"ContainerStarted","Data":"5bfdb90c8d862c4ab1350e7a85219fb8a0a435bc0873dbff91d266405ef8df2f"} Dec 05 05:57:24 crc kubenswrapper[4742]: I1205 05:57:24.744276 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wpdk5" podStartSLOduration=2.913854737 podStartE2EDuration="6.744255461s" podCreationTimestamp="2025-12-05 05:57:18 +0000 UTC" firstStartedPulling="2025-12-05 05:57:20.142457488 +0000 UTC m=+316.054592550" lastFinishedPulling="2025-12-05 05:57:23.972858212 +0000 UTC m=+319.884993274" observedRunningTime="2025-12-05 05:57:24.725660037 +0000 UTC m=+320.637795099" watchObservedRunningTime="2025-12-05 05:57:24.744255461 +0000 UTC m=+320.656390533" Dec 05 05:57:24 crc kubenswrapper[4742]: I1205 05:57:24.747021 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" podStartSLOduration=5.747010799 podStartE2EDuration="5.747010799s" podCreationTimestamp="2025-12-05 05:57:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:57:24.74455987 +0000 UTC m=+320.656694952" watchObservedRunningTime="2025-12-05 05:57:24.747010799 +0000 UTC m=+320.659145871" Dec 05 05:57:24 crc kubenswrapper[4742]: I1205 05:57:24.793961 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:25 crc kubenswrapper[4742]: I1205 05:57:25.706674 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" podUID="d5019b47-f2d8-439c-9f86-945c6af4c503" containerName="route-controller-manager" containerID="cri-o://cda502223131d1e623124acf9d4965b09f714dbe60392db16f74d0d540f3ad48" gracePeriod=30 Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.508072 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.508505 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.565905 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.619317 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.646771 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr"] Dec 05 05:57:26 crc kubenswrapper[4742]: E1205 05:57:26.647029 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5019b47-f2d8-439c-9f86-945c6af4c503" containerName="route-controller-manager" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.647065 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5019b47-f2d8-439c-9f86-945c6af4c503" containerName="route-controller-manager" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.647187 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5019b47-f2d8-439c-9f86-945c6af4c503" containerName="route-controller-manager" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.647616 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.654436 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr"] Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.711449 4742 generic.go:334] "Generic (PLEG): container finished" podID="d5019b47-f2d8-439c-9f86-945c6af4c503" containerID="cda502223131d1e623124acf9d4965b09f714dbe60392db16f74d0d540f3ad48" exitCode=0 Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.712135 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.712441 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" event={"ID":"d5019b47-f2d8-439c-9f86-945c6af4c503","Type":"ContainerDied","Data":"cda502223131d1e623124acf9d4965b09f714dbe60392db16f74d0d540f3ad48"} Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.712469 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs" event={"ID":"d5019b47-f2d8-439c-9f86-945c6af4c503","Type":"ContainerDied","Data":"5bfdb90c8d862c4ab1350e7a85219fb8a0a435bc0873dbff91d266405ef8df2f"} Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.712578 4742 scope.go:117] "RemoveContainer" containerID="cda502223131d1e623124acf9d4965b09f714dbe60392db16f74d0d540f3ad48" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.715892 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d5019b47-f2d8-439c-9f86-945c6af4c503-client-ca\") pod \"d5019b47-f2d8-439c-9f86-945c6af4c503\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.715961 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c78bz\" (UniqueName: \"kubernetes.io/projected/d5019b47-f2d8-439c-9f86-945c6af4c503-kube-api-access-c78bz\") pod \"d5019b47-f2d8-439c-9f86-945c6af4c503\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.715993 4742 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5019b47-f2d8-439c-9f86-945c6af4c503-config\") pod \"d5019b47-f2d8-439c-9f86-945c6af4c503\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.716032 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5019b47-f2d8-439c-9f86-945c6af4c503-serving-cert\") pod \"d5019b47-f2d8-439c-9f86-945c6af4c503\" (UID: \"d5019b47-f2d8-439c-9f86-945c6af4c503\") " Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.716284 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptn99\" (UniqueName: \"kubernetes.io/projected/33648761-4bb9-4fbf-b5da-734eda122d85-kube-api-access-ptn99\") pod \"route-controller-manager-5c8f48f598-95jvr\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.716327 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/33648761-4bb9-4fbf-b5da-734eda122d85-client-ca\") pod \"route-controller-manager-5c8f48f598-95jvr\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.716381 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33648761-4bb9-4fbf-b5da-734eda122d85-serving-cert\") pod \"route-controller-manager-5c8f48f598-95jvr\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.716411 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33648761-4bb9-4fbf-b5da-734eda122d85-config\") pod \"route-controller-manager-5c8f48f598-95jvr\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.716695 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5019b47-f2d8-439c-9f86-945c6af4c503-client-ca" (OuterVolumeSpecName: "client-ca") pod "d5019b47-f2d8-439c-9f86-945c6af4c503" (UID: "d5019b47-f2d8-439c-9f86-945c6af4c503"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.717493 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5019b47-f2d8-439c-9f86-945c6af4c503-config" (OuterVolumeSpecName: "config") pod "d5019b47-f2d8-439c-9f86-945c6af4c503" (UID: "d5019b47-f2d8-439c-9f86-945c6af4c503"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.734936 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5019b47-f2d8-439c-9f86-945c6af4c503-kube-api-access-c78bz" (OuterVolumeSpecName: "kube-api-access-c78bz") pod "d5019b47-f2d8-439c-9f86-945c6af4c503" (UID: "d5019b47-f2d8-439c-9f86-945c6af4c503"). InnerVolumeSpecName "kube-api-access-c78bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.735007 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5019b47-f2d8-439c-9f86-945c6af4c503-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d5019b47-f2d8-439c-9f86-945c6af4c503" (UID: "d5019b47-f2d8-439c-9f86-945c6af4c503"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.735909 4742 scope.go:117] "RemoveContainer" containerID="cda502223131d1e623124acf9d4965b09f714dbe60392db16f74d0d540f3ad48" Dec 05 05:57:26 crc kubenswrapper[4742]: E1205 05:57:26.741232 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cda502223131d1e623124acf9d4965b09f714dbe60392db16f74d0d540f3ad48\": container with ID starting with cda502223131d1e623124acf9d4965b09f714dbe60392db16f74d0d540f3ad48 not found: ID does not exist" containerID="cda502223131d1e623124acf9d4965b09f714dbe60392db16f74d0d540f3ad48" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.741275 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cda502223131d1e623124acf9d4965b09f714dbe60392db16f74d0d540f3ad48"} err="failed to get container status \"cda502223131d1e623124acf9d4965b09f714dbe60392db16f74d0d540f3ad48\": rpc error: code = NotFound desc = could not find container \"cda502223131d1e623124acf9d4965b09f714dbe60392db16f74d0d540f3ad48\": container with ID starting with cda502223131d1e623124acf9d4965b09f714dbe60392db16f74d0d540f3ad48 not found: ID does not exist" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.751502 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dms6l" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.751552 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dms6l" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.752413 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t26cm" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.788496 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dms6l" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.817464 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33648761-4bb9-4fbf-b5da-734eda122d85-serving-cert\") pod \"route-controller-manager-5c8f48f598-95jvr\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.817746 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/33648761-4bb9-4fbf-b5da-734eda122d85-config\") pod \"route-controller-manager-5c8f48f598-95jvr\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.817851 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptn99\" (UniqueName: \"kubernetes.io/projected/33648761-4bb9-4fbf-b5da-734eda122d85-kube-api-access-ptn99\") pod \"route-controller-manager-5c8f48f598-95jvr\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.817940 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/33648761-4bb9-4fbf-b5da-734eda122d85-client-ca\") pod \"route-controller-manager-5c8f48f598-95jvr\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.818086 4742 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d5019b47-f2d8-439c-9f86-945c6af4c503-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.818164 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c78bz\" (UniqueName: \"kubernetes.io/projected/d5019b47-f2d8-439c-9f86-945c6af4c503-kube-api-access-c78bz\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.818231 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5019b47-f2d8-439c-9f86-945c6af4c503-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.818292 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5019b47-f2d8-439c-9f86-945c6af4c503-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.818976 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/33648761-4bb9-4fbf-b5da-734eda122d85-client-ca\") pod \"route-controller-manager-5c8f48f598-95jvr\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.819049 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33648761-4bb9-4fbf-b5da-734eda122d85-config\") pod \"route-controller-manager-5c8f48f598-95jvr\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.821412 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33648761-4bb9-4fbf-b5da-734eda122d85-serving-cert\") pod \"route-controller-manager-5c8f48f598-95jvr\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.836410 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ptn99\" (UniqueName: \"kubernetes.io/projected/33648761-4bb9-4fbf-b5da-734eda122d85-kube-api-access-ptn99\") pod \"route-controller-manager-5c8f48f598-95jvr\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:26 crc kubenswrapper[4742]: I1205 05:57:26.991583 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:27 crc kubenswrapper[4742]: I1205 05:57:27.052899 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs"] Dec 05 05:57:27 crc kubenswrapper[4742]: I1205 05:57:27.055600 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c4db7d847-ghwvs"] Dec 05 05:57:27 crc kubenswrapper[4742]: I1205 05:57:27.378482 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr"] Dec 05 05:57:27 crc kubenswrapper[4742]: W1205 05:57:27.388996 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod33648761_4bb9_4fbf_b5da_734eda122d85.slice/crio-1ef4d3311bac9823dcedeada09566c267dc9bcc04a54e2b36d1a4fb1b43bc943 WatchSource:0}: Error finding container 1ef4d3311bac9823dcedeada09566c267dc9bcc04a54e2b36d1a4fb1b43bc943: Status 404 returned error can't find the container with id 1ef4d3311bac9823dcedeada09566c267dc9bcc04a54e2b36d1a4fb1b43bc943 Dec 05 05:57:27 crc kubenswrapper[4742]: I1205 05:57:27.718741 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" event={"ID":"33648761-4bb9-4fbf-b5da-734eda122d85","Type":"ContainerStarted","Data":"1ef4d3311bac9823dcedeada09566c267dc9bcc04a54e2b36d1a4fb1b43bc943"} Dec 05 05:57:27 crc kubenswrapper[4742]: I1205 05:57:27.759364 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dms6l" Dec 05 05:57:28 crc kubenswrapper[4742]: I1205 05:57:28.391322 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5019b47-f2d8-439c-9f86-945c6af4c503" path="/var/lib/kubelet/pods/d5019b47-f2d8-439c-9f86-945c6af4c503/volumes" Dec 05 05:57:28 crc kubenswrapper[4742]: I1205 05:57:28.724643 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" event={"ID":"33648761-4bb9-4fbf-b5da-734eda122d85","Type":"ContainerStarted","Data":"dd184ff3b9350b463d77a627ef533ae995668303db62b6c2d3fef54c4d28a559"} Dec 05 05:57:28 crc kubenswrapper[4742]: I1205 05:57:28.724952 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:28 crc kubenswrapper[4742]: I1205 05:57:28.740695 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" podStartSLOduration=4.7406815909999995 podStartE2EDuration="4.740681591s" podCreationTimestamp="2025-12-05 05:57:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-05 05:57:28.739143758 +0000 UTC m=+324.651278820" watchObservedRunningTime="2025-12-05 05:57:28.740681591 +0000 UTC m=+324.652816653" Dec 05 05:57:28 crc kubenswrapper[4742]: I1205 05:57:28.771699 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:57:29 crc kubenswrapper[4742]: I1205 05:57:29.123696 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nk79d" Dec 05 05:57:29 crc kubenswrapper[4742]: I1205 05:57:29.123767 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nk79d" Dec 05 05:57:29 crc kubenswrapper[4742]: I1205 05:57:29.168305 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nk79d" Dec 05 05:57:29 crc kubenswrapper[4742]: I1205 05:57:29.312710 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wpdk5" Dec 05 05:57:29 crc kubenswrapper[4742]: I1205 05:57:29.313047 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wpdk5" Dec 05 05:57:29 crc kubenswrapper[4742]: I1205 05:57:29.806294 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nk79d" Dec 05 05:57:30 crc kubenswrapper[4742]: I1205 05:57:30.347986 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wpdk5" podUID="c54462f3-33a2-4bf4-9601-5a321e633702" containerName="registry-server" probeResult="failure" output=< Dec 05 05:57:30 crc kubenswrapper[4742]: timeout: failed to connect service ":50051" within 1s Dec 05 05:57:30 crc kubenswrapper[4742]: > Dec 05 05:57:39 crc kubenswrapper[4742]: I1205 05:57:39.377899 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wpdk5" Dec 05 05:57:39 crc kubenswrapper[4742]: I1205 05:57:39.448228 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wpdk5" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.275703 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fwkcf"] Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.277617 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.299991 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fwkcf"] Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.323585 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/dca3dd13-19cb-4d0b-9be6-bda811060943-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.323630 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tmgm\" (UniqueName: \"kubernetes.io/projected/dca3dd13-19cb-4d0b-9be6-bda811060943-kube-api-access-7tmgm\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.323672 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.323730 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/dca3dd13-19cb-4d0b-9be6-bda811060943-registry-tls\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.323763 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dca3dd13-19cb-4d0b-9be6-bda811060943-bound-sa-token\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.323797 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/dca3dd13-19cb-4d0b-9be6-bda811060943-registry-certificates\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.323850 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dca3dd13-19cb-4d0b-9be6-bda811060943-trusted-ca\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.323889 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/dca3dd13-19cb-4d0b-9be6-bda811060943-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.349341 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.425252 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/dca3dd13-19cb-4d0b-9be6-bda811060943-registry-tls\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.425290 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dca3dd13-19cb-4d0b-9be6-bda811060943-bound-sa-token\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.425330 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/dca3dd13-19cb-4d0b-9be6-bda811060943-registry-certificates\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.425352 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dca3dd13-19cb-4d0b-9be6-bda811060943-trusted-ca\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.425388 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/dca3dd13-19cb-4d0b-9be6-bda811060943-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.425424 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/dca3dd13-19cb-4d0b-9be6-bda811060943-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.425447 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tmgm\" (UniqueName: \"kubernetes.io/projected/dca3dd13-19cb-4d0b-9be6-bda811060943-kube-api-access-7tmgm\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.428114 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/dca3dd13-19cb-4d0b-9be6-bda811060943-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.428769 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dca3dd13-19cb-4d0b-9be6-bda811060943-trusted-ca\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.429340 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/dca3dd13-19cb-4d0b-9be6-bda811060943-registry-certificates\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.433669 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/dca3dd13-19cb-4d0b-9be6-bda811060943-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.434209 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/dca3dd13-19cb-4d0b-9be6-bda811060943-registry-tls\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.441641 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tmgm\" (UniqueName: \"kubernetes.io/projected/dca3dd13-19cb-4d0b-9be6-bda811060943-kube-api-access-7tmgm\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.451953 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dca3dd13-19cb-4d0b-9be6-bda811060943-bound-sa-token\") pod \"image-registry-66df7c8f76-fwkcf\" (UID: \"dca3dd13-19cb-4d0b-9be6-bda811060943\") " pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.603989 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:47 crc kubenswrapper[4742]: I1205 05:57:47.859802 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fwkcf"] Dec 05 05:57:47 crc kubenswrapper[4742]: W1205 05:57:47.868118 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddca3dd13_19cb_4d0b_9be6_bda811060943.slice/crio-d50eee6dc6513ac13ed5b524d7f17b4ffe41b7f433728f147425813b09d36c4d WatchSource:0}: Error finding container d50eee6dc6513ac13ed5b524d7f17b4ffe41b7f433728f147425813b09d36c4d: Status 404 returned error can't find the container with id d50eee6dc6513ac13ed5b524d7f17b4ffe41b7f433728f147425813b09d36c4d Dec 05 05:57:48 crc kubenswrapper[4742]: I1205 05:57:48.853769 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" event={"ID":"dca3dd13-19cb-4d0b-9be6-bda811060943","Type":"ContainerStarted","Data":"e7586b9d0339162e4ca1d885fa0508adf81625ecf5ba1c29297bb8aba5c30679"} Dec 05 05:57:48 crc kubenswrapper[4742]: I1205 05:57:48.854279 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:57:48 crc kubenswrapper[4742]: I1205 05:57:48.854307 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" event={"ID":"dca3dd13-19cb-4d0b-9be6-bda811060943","Type":"ContainerStarted","Data":"d50eee6dc6513ac13ed5b524d7f17b4ffe41b7f433728f147425813b09d36c4d"} Dec 05 05:57:48 crc kubenswrapper[4742]: I1205 05:57:48.887440 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" podStartSLOduration=1.887413842 podStartE2EDuration="1.887413842s" podCreationTimestamp="2025-12-05 05:57:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:57:48.882091724 +0000 UTC m=+344.794226846" watchObservedRunningTime="2025-12-05 05:57:48.887413842 +0000 UTC m=+344.799548944" Dec 05 05:58:07 crc kubenswrapper[4742]: I1205 05:58:07.610465 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-fwkcf" Dec 05 05:58:07 crc kubenswrapper[4742]: I1205 05:58:07.686046 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-mp2sf"] Dec 05 05:58:16 crc kubenswrapper[4742]: I1205 05:58:16.670913 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:58:16 crc kubenswrapper[4742]: I1205 05:58:16.671632 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.322985 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr"] Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.323253 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" podUID="33648761-4bb9-4fbf-b5da-734eda122d85" containerName="route-controller-manager" containerID="cri-o://dd184ff3b9350b463d77a627ef533ae995668303db62b6c2d3fef54c4d28a559" gracePeriod=30 Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.672130 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.704460 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33648761-4bb9-4fbf-b5da-734eda122d85-config\") pod \"33648761-4bb9-4fbf-b5da-734eda122d85\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.704541 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33648761-4bb9-4fbf-b5da-734eda122d85-serving-cert\") pod \"33648761-4bb9-4fbf-b5da-734eda122d85\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.704575 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/33648761-4bb9-4fbf-b5da-734eda122d85-client-ca\") pod \"33648761-4bb9-4fbf-b5da-734eda122d85\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.704638 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptn99\" (UniqueName: \"kubernetes.io/projected/33648761-4bb9-4fbf-b5da-734eda122d85-kube-api-access-ptn99\") pod \"33648761-4bb9-4fbf-b5da-734eda122d85\" (UID: \"33648761-4bb9-4fbf-b5da-734eda122d85\") " Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.705309 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33648761-4bb9-4fbf-b5da-734eda122d85-client-ca" (OuterVolumeSpecName: "client-ca") pod "33648761-4bb9-4fbf-b5da-734eda122d85" (UID: "33648761-4bb9-4fbf-b5da-734eda122d85"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.705326 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33648761-4bb9-4fbf-b5da-734eda122d85-config" (OuterVolumeSpecName: "config") pod "33648761-4bb9-4fbf-b5da-734eda122d85" (UID: "33648761-4bb9-4fbf-b5da-734eda122d85"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.710190 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33648761-4bb9-4fbf-b5da-734eda122d85-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "33648761-4bb9-4fbf-b5da-734eda122d85" (UID: "33648761-4bb9-4fbf-b5da-734eda122d85"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.711365 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33648761-4bb9-4fbf-b5da-734eda122d85-kube-api-access-ptn99" (OuterVolumeSpecName: "kube-api-access-ptn99") pod "33648761-4bb9-4fbf-b5da-734eda122d85" (UID: "33648761-4bb9-4fbf-b5da-734eda122d85"). InnerVolumeSpecName "kube-api-access-ptn99". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.806048 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33648761-4bb9-4fbf-b5da-734eda122d85-config\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.806095 4742 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/33648761-4bb9-4fbf-b5da-734eda122d85-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.806106 4742 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/33648761-4bb9-4fbf-b5da-734eda122d85-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:17 crc kubenswrapper[4742]: I1205 05:58:17.806115 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptn99\" (UniqueName: \"kubernetes.io/projected/33648761-4bb9-4fbf-b5da-734eda122d85-kube-api-access-ptn99\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:18 crc kubenswrapper[4742]: I1205 05:58:18.033744 4742 generic.go:334] "Generic (PLEG): container finished" podID="33648761-4bb9-4fbf-b5da-734eda122d85" containerID="dd184ff3b9350b463d77a627ef533ae995668303db62b6c2d3fef54c4d28a559" exitCode=0 Dec 05 05:58:18 crc kubenswrapper[4742]: I1205 05:58:18.033780 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" event={"ID":"33648761-4bb9-4fbf-b5da-734eda122d85","Type":"ContainerDied","Data":"dd184ff3b9350b463d77a627ef533ae995668303db62b6c2d3fef54c4d28a559"} Dec 05 05:58:18 crc kubenswrapper[4742]: I1205 05:58:18.033801 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" event={"ID":"33648761-4bb9-4fbf-b5da-734eda122d85","Type":"ContainerDied","Data":"1ef4d3311bac9823dcedeada09566c267dc9bcc04a54e2b36d1a4fb1b43bc943"} Dec 05 05:58:18 crc kubenswrapper[4742]: I1205 05:58:18.033816 4742 scope.go:117] "RemoveContainer" containerID="dd184ff3b9350b463d77a627ef533ae995668303db62b6c2d3fef54c4d28a559" Dec 05 05:58:18 crc kubenswrapper[4742]: I1205 05:58:18.033863 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr" Dec 05 05:58:18 crc kubenswrapper[4742]: I1205 05:58:18.061261 4742 scope.go:117] "RemoveContainer" containerID="dd184ff3b9350b463d77a627ef533ae995668303db62b6c2d3fef54c4d28a559" Dec 05 05:58:18 crc kubenswrapper[4742]: E1205 05:58:18.061873 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd184ff3b9350b463d77a627ef533ae995668303db62b6c2d3fef54c4d28a559\": container with ID starting with dd184ff3b9350b463d77a627ef533ae995668303db62b6c2d3fef54c4d28a559 not found: ID does not exist" containerID="dd184ff3b9350b463d77a627ef533ae995668303db62b6c2d3fef54c4d28a559" Dec 05 05:58:18 crc kubenswrapper[4742]: I1205 05:58:18.061957 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd184ff3b9350b463d77a627ef533ae995668303db62b6c2d3fef54c4d28a559"} err="failed to get container status \"dd184ff3b9350b463d77a627ef533ae995668303db62b6c2d3fef54c4d28a559\": rpc error: code = NotFound desc = could not find container \"dd184ff3b9350b463d77a627ef533ae995668303db62b6c2d3fef54c4d28a559\": container with ID starting with dd184ff3b9350b463d77a627ef533ae995668303db62b6c2d3fef54c4d28a559 not found: ID does not exist" Dec 05 05:58:18 crc kubenswrapper[4742]: I1205 05:58:18.080562 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr"] Dec 05 05:58:18 crc kubenswrapper[4742]: I1205 05:58:18.084826 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c8f48f598-95jvr"] Dec 05 05:58:18 crc kubenswrapper[4742]: I1205 05:58:18.394988 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33648761-4bb9-4fbf-b5da-734eda122d85" path="/var/lib/kubelet/pods/33648761-4bb9-4fbf-b5da-734eda122d85/volumes" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.210773 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6"] Dec 05 05:58:19 crc kubenswrapper[4742]: E1205 05:58:19.211120 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33648761-4bb9-4fbf-b5da-734eda122d85" containerName="route-controller-manager" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.211142 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="33648761-4bb9-4fbf-b5da-734eda122d85" containerName="route-controller-manager" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.211323 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="33648761-4bb9-4fbf-b5da-734eda122d85" containerName="route-controller-manager" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.211941 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.215700 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.216376 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.216660 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.216975 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.217148 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.217268 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.225304 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b4ea290-5f49-4ad7-b302-54413c4561f2-serving-cert\") pod \"route-controller-manager-6c4db7d847-fx6k6\" (UID: \"7b4ea290-5f49-4ad7-b302-54413c4561f2\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.225382 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7b4ea290-5f49-4ad7-b302-54413c4561f2-client-ca\") pod \"route-controller-manager-6c4db7d847-fx6k6\" (UID: \"7b4ea290-5f49-4ad7-b302-54413c4561f2\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.225404 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9wh9q\" (UniqueName: \"kubernetes.io/projected/7b4ea290-5f49-4ad7-b302-54413c4561f2-kube-api-access-9wh9q\") pod \"route-controller-manager-6c4db7d847-fx6k6\" (UID: \"7b4ea290-5f49-4ad7-b302-54413c4561f2\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.225430 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b4ea290-5f49-4ad7-b302-54413c4561f2-config\") pod \"route-controller-manager-6c4db7d847-fx6k6\" (UID: \"7b4ea290-5f49-4ad7-b302-54413c4561f2\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.248493 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6"] Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.326421 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b4ea290-5f49-4ad7-b302-54413c4561f2-serving-cert\") pod 
\"route-controller-manager-6c4db7d847-fx6k6\" (UID: \"7b4ea290-5f49-4ad7-b302-54413c4561f2\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.326509 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7b4ea290-5f49-4ad7-b302-54413c4561f2-client-ca\") pod \"route-controller-manager-6c4db7d847-fx6k6\" (UID: \"7b4ea290-5f49-4ad7-b302-54413c4561f2\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.326540 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wh9q\" (UniqueName: \"kubernetes.io/projected/7b4ea290-5f49-4ad7-b302-54413c4561f2-kube-api-access-9wh9q\") pod \"route-controller-manager-6c4db7d847-fx6k6\" (UID: \"7b4ea290-5f49-4ad7-b302-54413c4561f2\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.326575 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b4ea290-5f49-4ad7-b302-54413c4561f2-config\") pod \"route-controller-manager-6c4db7d847-fx6k6\" (UID: \"7b4ea290-5f49-4ad7-b302-54413c4561f2\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.328187 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7b4ea290-5f49-4ad7-b302-54413c4561f2-client-ca\") pod \"route-controller-manager-6c4db7d847-fx6k6\" (UID: \"7b4ea290-5f49-4ad7-b302-54413c4561f2\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.328378 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b4ea290-5f49-4ad7-b302-54413c4561f2-config\") pod \"route-controller-manager-6c4db7d847-fx6k6\" (UID: \"7b4ea290-5f49-4ad7-b302-54413c4561f2\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.345208 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7b4ea290-5f49-4ad7-b302-54413c4561f2-serving-cert\") pod \"route-controller-manager-6c4db7d847-fx6k6\" (UID: \"7b4ea290-5f49-4ad7-b302-54413c4561f2\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.346574 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9wh9q\" (UniqueName: \"kubernetes.io/projected/7b4ea290-5f49-4ad7-b302-54413c4561f2-kube-api-access-9wh9q\") pod \"route-controller-manager-6c4db7d847-fx6k6\" (UID: \"7b4ea290-5f49-4ad7-b302-54413c4561f2\") " pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:19 crc kubenswrapper[4742]: I1205 05:58:19.566777 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:20 crc kubenswrapper[4742]: I1205 05:58:20.002214 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6"] Dec 05 05:58:20 crc kubenswrapper[4742]: I1205 05:58:20.050503 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" event={"ID":"7b4ea290-5f49-4ad7-b302-54413c4561f2","Type":"ContainerStarted","Data":"47cfd99720e592a79411c5dfe922c0fcb3d90e4a64f6465a92a4ef622063a0b2"} Dec 05 05:58:21 crc kubenswrapper[4742]: I1205 05:58:21.057623 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" event={"ID":"7b4ea290-5f49-4ad7-b302-54413c4561f2","Type":"ContainerStarted","Data":"52480cff1ede85d827de89c2bb083435989c0709159f0ad3d39c04520ffe07b1"} Dec 05 05:58:21 crc kubenswrapper[4742]: I1205 05:58:21.058117 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:21 crc kubenswrapper[4742]: I1205 05:58:21.066016 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" Dec 05 05:58:21 crc kubenswrapper[4742]: I1205 05:58:21.081210 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6c4db7d847-fx6k6" podStartSLOduration=4.081183397 podStartE2EDuration="4.081183397s" podCreationTimestamp="2025-12-05 05:58:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 05:58:21.077319051 +0000 UTC m=+376.989454193" watchObservedRunningTime="2025-12-05 05:58:21.081183397 +0000 UTC m=+376.993318499" Dec 05 05:58:32 crc kubenswrapper[4742]: I1205 05:58:32.742219 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" podUID="e9d19072-f6f9-42da-8b86-5d6bff4b340c" containerName="registry" containerID="cri-o://e28ee2b267b42d0dd0e560f8a11a748df5c6bf5fed2973fcd2b5731f66cd2513" gracePeriod=30 Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.236822 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.319924 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-registry-tls\") pod \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.320117 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e9d19072-f6f9-42da-8b86-5d6bff4b340c-ca-trust-extracted\") pod \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.320263 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28kw5\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-kube-api-access-28kw5\") pod \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.320321 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e9d19072-f6f9-42da-8b86-5d6bff4b340c-installation-pull-secrets\") pod \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.320404 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-bound-sa-token\") pod \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.320500 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e9d19072-f6f9-42da-8b86-5d6bff4b340c-trusted-ca\") pod \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.320607 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e9d19072-f6f9-42da-8b86-5d6bff4b340c-registry-certificates\") pod \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.320826 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\" (UID: \"e9d19072-f6f9-42da-8b86-5d6bff4b340c\") " Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.322013 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9d19072-f6f9-42da-8b86-5d6bff4b340c-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "e9d19072-f6f9-42da-8b86-5d6bff4b340c" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.322543 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9d19072-f6f9-42da-8b86-5d6bff4b340c-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "e9d19072-f6f9-42da-8b86-5d6bff4b340c" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.327686 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-kube-api-access-28kw5" (OuterVolumeSpecName: "kube-api-access-28kw5") pod "e9d19072-f6f9-42da-8b86-5d6bff4b340c" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c"). InnerVolumeSpecName "kube-api-access-28kw5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.328957 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9d19072-f6f9-42da-8b86-5d6bff4b340c-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "e9d19072-f6f9-42da-8b86-5d6bff4b340c" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.329509 4742 generic.go:334] "Generic (PLEG): container finished" podID="e9d19072-f6f9-42da-8b86-5d6bff4b340c" containerID="e28ee2b267b42d0dd0e560f8a11a748df5c6bf5fed2973fcd2b5731f66cd2513" exitCode=0 Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.329560 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" event={"ID":"e9d19072-f6f9-42da-8b86-5d6bff4b340c","Type":"ContainerDied","Data":"e28ee2b267b42d0dd0e560f8a11a748df5c6bf5fed2973fcd2b5731f66cd2513"} Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.329599 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" event={"ID":"e9d19072-f6f9-42da-8b86-5d6bff4b340c","Type":"ContainerDied","Data":"72cc8594c443119241f9d5001fa69f1d4c6fb97841032b078fdafb3affa8142d"} Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.329620 4742 scope.go:117] "RemoveContainer" containerID="e28ee2b267b42d0dd0e560f8a11a748df5c6bf5fed2973fcd2b5731f66cd2513" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.329704 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "e9d19072-f6f9-42da-8b86-5d6bff4b340c" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.330094 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-mp2sf" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.330803 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "e9d19072-f6f9-42da-8b86-5d6bff4b340c" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.346385 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "e9d19072-f6f9-42da-8b86-5d6bff4b340c" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.351415 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9d19072-f6f9-42da-8b86-5d6bff4b340c-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "e9d19072-f6f9-42da-8b86-5d6bff4b340c" (UID: "e9d19072-f6f9-42da-8b86-5d6bff4b340c"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.382732 4742 scope.go:117] "RemoveContainer" containerID="e28ee2b267b42d0dd0e560f8a11a748df5c6bf5fed2973fcd2b5731f66cd2513" Dec 05 05:58:33 crc kubenswrapper[4742]: E1205 05:58:33.383373 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e28ee2b267b42d0dd0e560f8a11a748df5c6bf5fed2973fcd2b5731f66cd2513\": container with ID starting with e28ee2b267b42d0dd0e560f8a11a748df5c6bf5fed2973fcd2b5731f66cd2513 not found: ID does not exist" containerID="e28ee2b267b42d0dd0e560f8a11a748df5c6bf5fed2973fcd2b5731f66cd2513" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.383430 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e28ee2b267b42d0dd0e560f8a11a748df5c6bf5fed2973fcd2b5731f66cd2513"} err="failed to get container status \"e28ee2b267b42d0dd0e560f8a11a748df5c6bf5fed2973fcd2b5731f66cd2513\": rpc error: code = NotFound desc = could not find container \"e28ee2b267b42d0dd0e560f8a11a748df5c6bf5fed2973fcd2b5731f66cd2513\": container with ID starting with e28ee2b267b42d0dd0e560f8a11a748df5c6bf5fed2973fcd2b5731f66cd2513 not found: ID does not exist" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.422594 4742 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.422632 4742 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e9d19072-f6f9-42da-8b86-5d6bff4b340c-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.422646 4742 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e9d19072-f6f9-42da-8b86-5d6bff4b340c-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.422660 4742 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.422671 4742 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: 
\"kubernetes.io/empty-dir/e9d19072-f6f9-42da-8b86-5d6bff4b340c-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.422682 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28kw5\" (UniqueName: \"kubernetes.io/projected/e9d19072-f6f9-42da-8b86-5d6bff4b340c-kube-api-access-28kw5\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.422696 4742 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e9d19072-f6f9-42da-8b86-5d6bff4b340c-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.689117 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-mp2sf"] Dec 05 05:58:33 crc kubenswrapper[4742]: I1205 05:58:33.696748 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-mp2sf"] Dec 05 05:58:34 crc kubenswrapper[4742]: I1205 05:58:34.393482 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9d19072-f6f9-42da-8b86-5d6bff4b340c" path="/var/lib/kubelet/pods/e9d19072-f6f9-42da-8b86-5d6bff4b340c/volumes" Dec 05 05:58:46 crc kubenswrapper[4742]: I1205 05:58:46.671747 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:58:46 crc kubenswrapper[4742]: I1205 05:58:46.673366 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:59:16 crc kubenswrapper[4742]: I1205 05:59:16.671669 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 05:59:16 crc kubenswrapper[4742]: I1205 05:59:16.672469 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 05:59:16 crc kubenswrapper[4742]: I1205 05:59:16.672536 4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 05:59:16 crc kubenswrapper[4742]: I1205 05:59:16.673764 4742 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dc2a5e1e92e2a0c25ac27056156677176ab47a63f4ca68a05a9db44f9bedc61a"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 05:59:16 crc kubenswrapper[4742]: I1205 05:59:16.674016 4742 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://dc2a5e1e92e2a0c25ac27056156677176ab47a63f4ca68a05a9db44f9bedc61a" gracePeriod=600 Dec 05 05:59:17 crc kubenswrapper[4742]: I1205 05:59:17.637633 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="dc2a5e1e92e2a0c25ac27056156677176ab47a63f4ca68a05a9db44f9bedc61a" exitCode=0 Dec 05 05:59:17 crc kubenswrapper[4742]: I1205 05:59:17.637758 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"dc2a5e1e92e2a0c25ac27056156677176ab47a63f4ca68a05a9db44f9bedc61a"} Dec 05 05:59:17 crc kubenswrapper[4742]: I1205 05:59:17.638496 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"cb5bbdf33d38a4f3a6dca04ce1c51478916fa93eae9566e2afc52c599bf9e737"} Dec 05 05:59:17 crc kubenswrapper[4742]: I1205 05:59:17.638539 4742 scope.go:117] "RemoveContainer" containerID="26aea08ace56c993fc15d19289e2f981dc0289bfa1f72141ca780e0a5761faf4" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.238352 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw"] Dec 05 06:00:00 crc kubenswrapper[4742]: E1205 06:00:00.239366 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9d19072-f6f9-42da-8b86-5d6bff4b340c" containerName="registry" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.239386 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9d19072-f6f9-42da-8b86-5d6bff4b340c" containerName="registry" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.239502 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9d19072-f6f9-42da-8b86-5d6bff4b340c" containerName="registry" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.239942 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.245347 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.246254 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.248007 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw"] Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.266706 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa52a9ca-a479-458f-a6f6-073376f06461-config-volume\") pod \"collect-profiles-29415240-bmlpw\" (UID: \"aa52a9ca-a479-458f-a6f6-073376f06461\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.266822 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa52a9ca-a479-458f-a6f6-073376f06461-secret-volume\") pod \"collect-profiles-29415240-bmlpw\" (UID: \"aa52a9ca-a479-458f-a6f6-073376f06461\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.266956 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlfhx\" (UniqueName: \"kubernetes.io/projected/aa52a9ca-a479-458f-a6f6-073376f06461-kube-api-access-dlfhx\") pod \"collect-profiles-29415240-bmlpw\" (UID: \"aa52a9ca-a479-458f-a6f6-073376f06461\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.368373 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlfhx\" (UniqueName: \"kubernetes.io/projected/aa52a9ca-a479-458f-a6f6-073376f06461-kube-api-access-dlfhx\") pod \"collect-profiles-29415240-bmlpw\" (UID: \"aa52a9ca-a479-458f-a6f6-073376f06461\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.368449 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa52a9ca-a479-458f-a6f6-073376f06461-config-volume\") pod \"collect-profiles-29415240-bmlpw\" (UID: \"aa52a9ca-a479-458f-a6f6-073376f06461\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.368563 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa52a9ca-a479-458f-a6f6-073376f06461-secret-volume\") pod \"collect-profiles-29415240-bmlpw\" (UID: \"aa52a9ca-a479-458f-a6f6-073376f06461\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.369681 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa52a9ca-a479-458f-a6f6-073376f06461-config-volume\") pod 
\"collect-profiles-29415240-bmlpw\" (UID: \"aa52a9ca-a479-458f-a6f6-073376f06461\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.376901 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa52a9ca-a479-458f-a6f6-073376f06461-secret-volume\") pod \"collect-profiles-29415240-bmlpw\" (UID: \"aa52a9ca-a479-458f-a6f6-073376f06461\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.398813 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlfhx\" (UniqueName: \"kubernetes.io/projected/aa52a9ca-a479-458f-a6f6-073376f06461-kube-api-access-dlfhx\") pod \"collect-profiles-29415240-bmlpw\" (UID: \"aa52a9ca-a479-458f-a6f6-073376f06461\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" Dec 05 06:00:00 crc kubenswrapper[4742]: I1205 06:00:00.569042 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" Dec 05 06:00:01 crc kubenswrapper[4742]: I1205 06:00:01.072703 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw"] Dec 05 06:00:01 crc kubenswrapper[4742]: W1205 06:00:01.095955 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa52a9ca_a479_458f_a6f6_073376f06461.slice/crio-9e5f357256b54094be0ba3e608e104f387547127b4be4bf1f78f4f9595a907ca WatchSource:0}: Error finding container 9e5f357256b54094be0ba3e608e104f387547127b4be4bf1f78f4f9595a907ca: Status 404 returned error can't find the container with id 9e5f357256b54094be0ba3e608e104f387547127b4be4bf1f78f4f9595a907ca Dec 05 06:00:01 crc kubenswrapper[4742]: I1205 06:00:01.919453 4742 generic.go:334] "Generic (PLEG): container finished" podID="aa52a9ca-a479-458f-a6f6-073376f06461" containerID="abc38135b9aaf87a7d4f17967c3f97f9ab2929fa85279e4896a5b1a6507ed98e" exitCode=0 Dec 05 06:00:01 crc kubenswrapper[4742]: I1205 06:00:01.919533 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" event={"ID":"aa52a9ca-a479-458f-a6f6-073376f06461","Type":"ContainerDied","Data":"abc38135b9aaf87a7d4f17967c3f97f9ab2929fa85279e4896a5b1a6507ed98e"} Dec 05 06:00:01 crc kubenswrapper[4742]: I1205 06:00:01.919846 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" event={"ID":"aa52a9ca-a479-458f-a6f6-073376f06461","Type":"ContainerStarted","Data":"9e5f357256b54094be0ba3e608e104f387547127b4be4bf1f78f4f9595a907ca"} Dec 05 06:00:03 crc kubenswrapper[4742]: I1205 06:00:03.175614 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" Dec 05 06:00:03 crc kubenswrapper[4742]: I1205 06:00:03.310302 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlfhx\" (UniqueName: \"kubernetes.io/projected/aa52a9ca-a479-458f-a6f6-073376f06461-kube-api-access-dlfhx\") pod \"aa52a9ca-a479-458f-a6f6-073376f06461\" (UID: \"aa52a9ca-a479-458f-a6f6-073376f06461\") " Dec 05 06:00:03 crc kubenswrapper[4742]: I1205 06:00:03.310400 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa52a9ca-a479-458f-a6f6-073376f06461-config-volume\") pod \"aa52a9ca-a479-458f-a6f6-073376f06461\" (UID: \"aa52a9ca-a479-458f-a6f6-073376f06461\") " Dec 05 06:00:03 crc kubenswrapper[4742]: I1205 06:00:03.310448 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa52a9ca-a479-458f-a6f6-073376f06461-secret-volume\") pod \"aa52a9ca-a479-458f-a6f6-073376f06461\" (UID: \"aa52a9ca-a479-458f-a6f6-073376f06461\") " Dec 05 06:00:03 crc kubenswrapper[4742]: I1205 06:00:03.311555 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa52a9ca-a479-458f-a6f6-073376f06461-config-volume" (OuterVolumeSpecName: "config-volume") pod "aa52a9ca-a479-458f-a6f6-073376f06461" (UID: "aa52a9ca-a479-458f-a6f6-073376f06461"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:00:03 crc kubenswrapper[4742]: I1205 06:00:03.316359 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa52a9ca-a479-458f-a6f6-073376f06461-kube-api-access-dlfhx" (OuterVolumeSpecName: "kube-api-access-dlfhx") pod "aa52a9ca-a479-458f-a6f6-073376f06461" (UID: "aa52a9ca-a479-458f-a6f6-073376f06461"). InnerVolumeSpecName "kube-api-access-dlfhx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:00:03 crc kubenswrapper[4742]: I1205 06:00:03.317236 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa52a9ca-a479-458f-a6f6-073376f06461-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "aa52a9ca-a479-458f-a6f6-073376f06461" (UID: "aa52a9ca-a479-458f-a6f6-073376f06461"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:00:03 crc kubenswrapper[4742]: I1205 06:00:03.412470 4742 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa52a9ca-a479-458f-a6f6-073376f06461-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:00:03 crc kubenswrapper[4742]: I1205 06:00:03.412531 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlfhx\" (UniqueName: \"kubernetes.io/projected/aa52a9ca-a479-458f-a6f6-073376f06461-kube-api-access-dlfhx\") on node \"crc\" DevicePath \"\"" Dec 05 06:00:03 crc kubenswrapper[4742]: I1205 06:00:03.412550 4742 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa52a9ca-a479-458f-a6f6-073376f06461-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:00:03 crc kubenswrapper[4742]: I1205 06:00:03.933146 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" event={"ID":"aa52a9ca-a479-458f-a6f6-073376f06461","Type":"ContainerDied","Data":"9e5f357256b54094be0ba3e608e104f387547127b4be4bf1f78f4f9595a907ca"} Dec 05 06:00:03 crc kubenswrapper[4742]: I1205 06:00:03.933812 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e5f357256b54094be0ba3e608e104f387547127b4be4bf1f78f4f9595a907ca" Dec 05 06:00:03 crc kubenswrapper[4742]: I1205 06:00:03.933231 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw" Dec 05 06:01:16 crc kubenswrapper[4742]: I1205 06:01:16.670927 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:01:16 crc kubenswrapper[4742]: I1205 06:01:16.672286 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:01:46 crc kubenswrapper[4742]: I1205 06:01:46.671584 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:01:46 crc kubenswrapper[4742]: I1205 06:01:46.672450 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:02:16 crc kubenswrapper[4742]: I1205 06:02:16.670982 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:02:16 crc kubenswrapper[4742]: I1205 06:02:16.673332 
4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:02:16 crc kubenswrapper[4742]: I1205 06:02:16.673462 4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 06:02:16 crc kubenswrapper[4742]: I1205 06:02:16.674665 4742 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cb5bbdf33d38a4f3a6dca04ce1c51478916fa93eae9566e2afc52c599bf9e737"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:02:16 crc kubenswrapper[4742]: I1205 06:02:16.674781 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://cb5bbdf33d38a4f3a6dca04ce1c51478916fa93eae9566e2afc52c599bf9e737" gracePeriod=600 Dec 05 06:02:16 crc kubenswrapper[4742]: I1205 06:02:16.829688 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="cb5bbdf33d38a4f3a6dca04ce1c51478916fa93eae9566e2afc52c599bf9e737" exitCode=0 Dec 05 06:02:16 crc kubenswrapper[4742]: I1205 06:02:16.829754 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"cb5bbdf33d38a4f3a6dca04ce1c51478916fa93eae9566e2afc52c599bf9e737"} Dec 05 06:02:16 crc kubenswrapper[4742]: I1205 06:02:16.829808 4742 scope.go:117] "RemoveContainer" containerID="dc2a5e1e92e2a0c25ac27056156677176ab47a63f4ca68a05a9db44f9bedc61a" Dec 05 06:02:17 crc kubenswrapper[4742]: I1205 06:02:17.840135 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"8712ed854f5ba4470f6a7971cc87ba22a52ea34afd6f25ae35634401b00bf15a"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.164472 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-m9jc4"] Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.166733 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovn-controller" containerID="cri-o://00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a" gracePeriod=30 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.167183 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="sbdb" containerID="cri-o://a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1" gracePeriod=30 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.167267 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" 
podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="kube-rbac-proxy-node" containerID="cri-o://c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689" gracePeriod=30 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.167306 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="northd" containerID="cri-o://9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409" gracePeriod=30 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.167336 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787" gracePeriod=30 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.167369 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovn-acl-logging" containerID="cri-o://7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d" gracePeriod=30 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.167426 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="nbdb" containerID="cri-o://de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10" gracePeriod=30 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.214271 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" containerID="cri-o://4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81" gracePeriod=30 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.421875 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-tkmd5"] Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.422197 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa52a9ca-a479-458f-a6f6-073376f06461" containerName="collect-profiles" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.422221 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa52a9ca-a479-458f-a6f6-073376f06461" containerName="collect-profiles" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.422591 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa52a9ca-a479-458f-a6f6-073376f06461" containerName="collect-profiles" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.423209 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.424708 4742 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-dtfxz" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.425186 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.425233 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.425371 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.463950 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/3.log" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.466996 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovn-acl-logging/0.log" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.469134 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovn-controller/0.log" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.470823 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.523663 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-zbwkg"] Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.523892 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="kubecfg-setup" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.523910 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="kubecfg-setup" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.523920 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="northd" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.523929 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="northd" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.523942 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.523949 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.523959 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="sbdb" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.523966 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="sbdb" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.523976 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" 
containerName="kube-rbac-proxy-ovn-metrics" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.523983 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.523992 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="nbdb" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.523999 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="nbdb" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.524009 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovn-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524018 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovn-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.524029 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="kube-rbac-proxy-node" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524036 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="kube-rbac-proxy-node" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.524049 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524070 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.524079 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524087 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.524098 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524105 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.524113 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovn-acl-logging" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524120 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovn-acl-logging" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524222 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524235 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524243 4742 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="northd" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524253 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="kube-rbac-proxy-node" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524262 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524271 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="nbdb" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524277 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524284 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovn-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524292 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524298 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovn-acl-logging" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524305 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="sbdb" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.524390 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524400 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.524485 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerName="ovnkube-controller" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.526218 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581614 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-env-overrides\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581660 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-cni-netd\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581687 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-etc-openvswitch\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581705 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-systemd-units\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581730 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovnkube-script-lib\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581753 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-run-ovn-kubernetes\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581776 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-log-socket\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581767 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581812 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovnkube-config\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581810 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581829 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581869 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovn-node-metrics-cert\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581886 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-log-socket" (OuterVolumeSpecName: "log-socket") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581895 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-var-lib-openvswitch\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581918 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-var-lib-cni-networks-ovn-kubernetes\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581943 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-cni-bin\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581962 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-systemd\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.581987 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-kubelet\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582004 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-slash\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582030 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-node-log\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582079 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-openvswitch\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582111 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b82wp\" (UniqueName: \"kubernetes.io/projected/06ddc689-50f2-409f-9ac8-8f6a1bed0831-kube-api-access-b82wp\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582138 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-ovn\") pod 
\"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582124 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582165 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-run-netns\") pod \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\" (UID: \"06ddc689-50f2-409f-9ac8-8f6a1bed0831\") " Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582193 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582237 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582244 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582302 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582319 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-node-mnt\") pod \"crc-storage-crc-tkmd5\" (UID: \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\") " pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582334 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582330 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582353 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-slash" (OuterVolumeSpecName: "host-slash") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582345 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzthc\" (UniqueName: \"kubernetes.io/projected/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-kube-api-access-xzthc\") pod \"crc-storage-crc-tkmd5\" (UID: \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\") " pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582372 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-node-log" (OuterVolumeSpecName: "node-log") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582389 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582424 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582454 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-crc-storage\") pod \"crc-storage-crc-tkmd5\" (UID: \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\") " pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582480 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582655 4742 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582672 4742 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582683 4742 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-slash\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582694 4742 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-node-log\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582704 4742 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582715 4742 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582726 4742 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582738 4742 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582747 4742 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582757 4742 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582767 4742 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582775 4742 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582785 4742 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-log-socket\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: 
I1205 06:03:59.582793 4742 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582801 4742 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582810 4742 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.582847 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.587164 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06ddc689-50f2-409f-9ac8-8f6a1bed0831-kube-api-access-b82wp" (OuterVolumeSpecName: "kube-api-access-b82wp") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "kube-api-access-b82wp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.593731 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.593747 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "06ddc689-50f2-409f-9ac8-8f6a1bed0831" (UID: "06ddc689-50f2-409f-9ac8-8f6a1bed0831"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.604736 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovnkube-controller/3.log" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607021 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovn-acl-logging/0.log" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607496 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-m9jc4_06ddc689-50f2-409f-9ac8-8f6a1bed0831/ovn-controller/0.log" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607775 4742 generic.go:334] "Generic (PLEG): container finished" podID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerID="4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81" exitCode=0 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607796 4742 generic.go:334] "Generic (PLEG): container finished" podID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerID="a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1" exitCode=0 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607804 4742 generic.go:334] "Generic (PLEG): container finished" podID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerID="de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10" exitCode=0 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607799 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerDied","Data":"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607840 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerDied","Data":"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607853 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerDied","Data":"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607862 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerDied","Data":"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607877 4742 scope.go:117] "RemoveContainer" containerID="4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607890 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607811 4742 generic.go:334] "Generic (PLEG): container finished" podID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerID="9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409" exitCode=0 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607945 4742 generic.go:334] "Generic (PLEG): container finished" podID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerID="992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787" exitCode=0 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607957 4742 generic.go:334] "Generic (PLEG): container finished" podID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerID="c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689" exitCode=0 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607965 4742 generic.go:334] "Generic (PLEG): container finished" podID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerID="7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d" exitCode=143 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.607974 4742 generic.go:334] "Generic (PLEG): container finished" podID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" containerID="00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a" exitCode=143 Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608001 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerDied","Data":"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608013 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerDied","Data":"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608022 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608032 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608037 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608042 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608049 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608071 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608076 4742 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608082 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608087 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608096 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerDied","Data":"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608104 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608110 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608115 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608121 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608126 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608131 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608136 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608141 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608146 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608151 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608158 4742 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerDied","Data":"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608167 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608174 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608179 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608184 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608189 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608194 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608200 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608205 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608210 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608216 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608222 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-m9jc4" event={"ID":"06ddc689-50f2-409f-9ac8-8f6a1bed0831","Type":"ContainerDied","Data":"61fd67b2c6e7a4d2aa2ef4aecffc4bcd9a0db0ca9c87d712b659ad7f5450e9cd"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608230 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81"} Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608237 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251"} 
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608242 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1"}
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608247 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10"}
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608252 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409"}
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608258 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787"}
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608263 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689"}
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608268 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d"}
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608273 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a"}
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.608278 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5"}
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.610757 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-776bt_39641a18-5d13-441f-9956-3777b9f27703/kube-multus/2.log"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.611177 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-776bt_39641a18-5d13-441f-9956-3777b9f27703/kube-multus/1.log"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.611207 4742 generic.go:334] "Generic (PLEG): container finished" podID="39641a18-5d13-441f-9956-3777b9f27703" containerID="27d377e2c76cba7de8c7c932e8375753cd96d8de8cb46c0a87705032bf8934b1" exitCode=2
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.611226 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-776bt" event={"ID":"39641a18-5d13-441f-9956-3777b9f27703","Type":"ContainerDied","Data":"27d377e2c76cba7de8c7c932e8375753cd96d8de8cb46c0a87705032bf8934b1"}
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.611239 4742 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422"}
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.611579 4742 scope.go:117] "RemoveContainer" containerID="27d377e2c76cba7de8c7c932e8375753cd96d8de8cb46c0a87705032bf8934b1"
Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.611739 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-776bt_openshift-multus(39641a18-5d13-441f-9956-3777b9f27703)\"" pod="openshift-multus/multus-776bt" podUID="39641a18-5d13-441f-9956-3777b9f27703"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.628386 4742 scope.go:117] "RemoveContainer" containerID="d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.654417 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-m9jc4"]
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.657299 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-m9jc4"]
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.668223 4742 scope.go:117] "RemoveContainer" containerID="a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.684262 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-run-netns\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.684442 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-slash\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.684576 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-cni-netd\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.684643 4742 scope.go:117] "RemoveContainer" containerID="de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.684674 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fbfe7bce-dbcc-4cef-9562-f260c95eead1-ovn-node-metrics-cert\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.684697 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-kubelet\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.684724 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fbfe7bce-dbcc-4cef-9562-f260c95eead1-ovnkube-script-lib\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.684838 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-var-lib-openvswitch\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.684873 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fbfe7bce-dbcc-4cef-9562-f260c95eead1-ovnkube-config\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.684925 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.684943 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-run-systemd\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.684987 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-node-mnt\") pod \"crc-storage-crc-tkmd5\" (UID: \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\") " pod="crc-storage/crc-storage-crc-tkmd5"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685009 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzthc\" (UniqueName: \"kubernetes.io/projected/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-kube-api-access-xzthc\") pod \"crc-storage-crc-tkmd5\" (UID: \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\") " pod="crc-storage/crc-storage-crc-tkmd5"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685026 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-etc-openvswitch\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685040 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-run-ovn\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685121 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fbfe7bce-dbcc-4cef-9562-f260c95eead1-env-overrides\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685151 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-cni-bin\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685195 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-crc-storage\") pod \"crc-storage-crc-tkmd5\" (UID: \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\") " pod="crc-storage/crc-storage-crc-tkmd5"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685223 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2v6c\" (UniqueName: \"kubernetes.io/projected/fbfe7bce-dbcc-4cef-9562-f260c95eead1-kube-api-access-x2v6c\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685282 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-log-socket\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685312 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-systemd-units\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685342 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-node-log\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685368 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-run-openvswitch\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685421 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-run-ovn-kubernetes\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg"
Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685678 4742 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-run-systemd\") on
node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685707 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b82wp\" (UniqueName: \"kubernetes.io/projected/06ddc689-50f2-409f-9ac8-8f6a1bed0831-kube-api-access-b82wp\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685727 4742 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/06ddc689-50f2-409f-9ac8-8f6a1bed0831-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685742 4742 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/06ddc689-50f2-409f-9ac8-8f6a1bed0831-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.685712 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-node-mnt\") pod \"crc-storage-crc-tkmd5\" (UID: \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\") " pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.686935 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-crc-storage\") pod \"crc-storage-crc-tkmd5\" (UID: \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\") " pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.705659 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzthc\" (UniqueName: \"kubernetes.io/projected/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-kube-api-access-xzthc\") pod \"crc-storage-crc-tkmd5\" (UID: \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\") " pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.706433 4742 scope.go:117] "RemoveContainer" containerID="9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.721358 4742 scope.go:117] "RemoveContainer" containerID="992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.733674 4742 scope.go:117] "RemoveContainer" containerID="c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.751114 4742 scope.go:117] "RemoveContainer" containerID="7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.765718 4742 scope.go:117] "RemoveContainer" containerID="00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.778475 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.781801 4742 scope.go:117] "RemoveContainer" containerID="4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.787440 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fbfe7bce-dbcc-4cef-9562-f260c95eead1-ovn-node-metrics-cert\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.787521 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-kubelet\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.787561 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fbfe7bce-dbcc-4cef-9562-f260c95eead1-ovnkube-script-lib\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.787609 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-var-lib-openvswitch\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.787642 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fbfe7bce-dbcc-4cef-9562-f260c95eead1-ovnkube-config\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.787684 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.787714 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-run-systemd\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.787744 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-run-ovn\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.787981 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-etc-openvswitch\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.787995 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-var-lib-openvswitch\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788021 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fbfe7bce-dbcc-4cef-9562-f260c95eead1-env-overrides\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788047 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-kubelet\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788088 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-cni-bin\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788091 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788129 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2v6c\" (UniqueName: \"kubernetes.io/projected/fbfe7bce-dbcc-4cef-9562-f260c95eead1-kube-api-access-x2v6c\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788244 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-log-socket\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788307 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-systemd-units\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788341 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-node-log\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788384 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-run-openvswitch\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788438 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-run-ovn-kubernetes\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788483 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-run-netns\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788531 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-slash\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788608 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-cni-netd\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788669 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/fbfe7bce-dbcc-4cef-9562-f260c95eead1-ovnkube-script-lib\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788714 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-systemd-units\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788739 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-cni-netd\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788790 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-node-log\") pod \"ovnkube-node-zbwkg\" (UID: 
\"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788834 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-run-openvswitch\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788876 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-run-ovn-kubernetes\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788881 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-run-systemd\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788916 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-run-netns\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788948 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-run-ovn\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788958 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-slash\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.788991 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-etc-openvswitch\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.789009 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-host-cni-bin\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.789043 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/fbfe7bce-dbcc-4cef-9562-f260c95eead1-log-socket\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.789267 4742 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/fbfe7bce-dbcc-4cef-9562-f260c95eead1-ovnkube-config\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.789721 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/fbfe7bce-dbcc-4cef-9562-f260c95eead1-env-overrides\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.795650 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/fbfe7bce-dbcc-4cef-9562-f260c95eead1-ovn-node-metrics-cert\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.800709 4742 scope.go:117] "RemoveContainer" containerID="4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.801679 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81\": container with ID starting with 4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81 not found: ID does not exist" containerID="4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.801725 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81"} err="failed to get container status \"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81\": rpc error: code = NotFound desc = could not find container \"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81\": container with ID starting with 4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.801761 4742 scope.go:117] "RemoveContainer" containerID="d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.802429 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\": container with ID starting with d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251 not found: ID does not exist" containerID="d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.802473 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251"} err="failed to get container status \"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\": rpc error: code = NotFound desc = could not find container \"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\": container with ID starting with d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251 not found: ID does not exist" Dec 05 
06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.802502 4742 scope.go:117] "RemoveContainer" containerID="a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.802931 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\": container with ID starting with a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1 not found: ID does not exist" containerID="a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.802963 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1"} err="failed to get container status \"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\": rpc error: code = NotFound desc = could not find container \"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\": container with ID starting with a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.802984 4742 scope.go:117] "RemoveContainer" containerID="de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.803354 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\": container with ID starting with de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10 not found: ID does not exist" containerID="de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.803388 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10"} err="failed to get container status \"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\": rpc error: code = NotFound desc = could not find container \"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\": container with ID starting with de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.803408 4742 scope.go:117] "RemoveContainer" containerID="9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.803704 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\": container with ID starting with 9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409 not found: ID does not exist" containerID="9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.803737 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409"} err="failed to get container status \"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\": rpc error: code = NotFound desc = could not find container 
\"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\": container with ID starting with 9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.803762 4742 scope.go:117] "RemoveContainer" containerID="992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.804182 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\": container with ID starting with 992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787 not found: ID does not exist" containerID="992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.804210 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787"} err="failed to get container status \"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\": rpc error: code = NotFound desc = could not find container \"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\": container with ID starting with 992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.804231 4742 scope.go:117] "RemoveContainer" containerID="c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.804624 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\": container with ID starting with c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689 not found: ID does not exist" containerID="c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.804655 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689"} err="failed to get container status \"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\": rpc error: code = NotFound desc = could not find container \"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\": container with ID starting with c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.804674 4742 scope.go:117] "RemoveContainer" containerID="7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.804980 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\": container with ID starting with 7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d not found: ID does not exist" containerID="7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.805004 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d"} 
err="failed to get container status \"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\": rpc error: code = NotFound desc = could not find container \"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\": container with ID starting with 7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.805018 4742 scope.go:117] "RemoveContainer" containerID="00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.805410 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\": container with ID starting with 00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a not found: ID does not exist" containerID="00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.805431 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a"} err="failed to get container status \"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\": rpc error: code = NotFound desc = could not find container \"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\": container with ID starting with 00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.805444 4742 scope.go:117] "RemoveContainer" containerID="4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.805524 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2v6c\" (UniqueName: \"kubernetes.io/projected/fbfe7bce-dbcc-4cef-9562-f260c95eead1-kube-api-access-x2v6c\") pod \"ovnkube-node-zbwkg\" (UID: \"fbfe7bce-dbcc-4cef-9562-f260c95eead1\") " pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.805717 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\": container with ID starting with 4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5 not found: ID does not exist" containerID="4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.805742 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5"} err="failed to get container status \"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\": rpc error: code = NotFound desc = could not find container \"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\": container with ID starting with 4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.805755 4742 scope.go:117] "RemoveContainer" containerID="4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.806000 4742 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81"} err="failed to get container status \"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81\": rpc error: code = NotFound desc = could not find container \"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81\": container with ID starting with 4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.806030 4742 scope.go:117] "RemoveContainer" containerID="d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.806282 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251"} err="failed to get container status \"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\": rpc error: code = NotFound desc = could not find container \"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\": container with ID starting with d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.806302 4742 scope.go:117] "RemoveContainer" containerID="a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.806615 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1"} err="failed to get container status \"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\": rpc error: code = NotFound desc = could not find container \"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\": container with ID starting with a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.806638 4742 scope.go:117] "RemoveContainer" containerID="de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.806811 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10"} err="failed to get container status \"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\": rpc error: code = NotFound desc = could not find container \"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\": container with ID starting with de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.806829 4742 scope.go:117] "RemoveContainer" containerID="9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.806994 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409"} err="failed to get container status \"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\": rpc error: code = NotFound desc = could not find container \"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\": container with ID starting with 9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409 not found: ID does not exist" Dec 
05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.807012 4742 scope.go:117] "RemoveContainer" containerID="992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.807322 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787"} err="failed to get container status \"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\": rpc error: code = NotFound desc = could not find container \"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\": container with ID starting with 992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.807344 4742 scope.go:117] "RemoveContainer" containerID="c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.807684 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689"} err="failed to get container status \"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\": rpc error: code = NotFound desc = could not find container \"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\": container with ID starting with c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.807705 4742 scope.go:117] "RemoveContainer" containerID="7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.807886 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d"} err="failed to get container status \"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\": rpc error: code = NotFound desc = could not find container \"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\": container with ID starting with 7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.807905 4742 scope.go:117] "RemoveContainer" containerID="00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.808086 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a"} err="failed to get container status \"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\": rpc error: code = NotFound desc = could not find container \"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\": container with ID starting with 00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.808101 4742 scope.go:117] "RemoveContainer" containerID="4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.808399 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5"} err="failed to get container status 
\"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\": rpc error: code = NotFound desc = could not find container \"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\": container with ID starting with 4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.808417 4742 scope.go:117] "RemoveContainer" containerID="4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.808598 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81"} err="failed to get container status \"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81\": rpc error: code = NotFound desc = could not find container \"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81\": container with ID starting with 4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.808620 4742 scope.go:117] "RemoveContainer" containerID="d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.808946 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251"} err="failed to get container status \"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\": rpc error: code = NotFound desc = could not find container \"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\": container with ID starting with d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.808967 4742 scope.go:117] "RemoveContainer" containerID="a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.809159 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1"} err="failed to get container status \"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\": rpc error: code = NotFound desc = could not find container \"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\": container with ID starting with a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.809227 4742 scope.go:117] "RemoveContainer" containerID="de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.809938 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10"} err="failed to get container status \"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\": rpc error: code = NotFound desc = could not find container \"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\": container with ID starting with de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.809958 4742 scope.go:117] "RemoveContainer" 
containerID="9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.810172 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409"} err="failed to get container status \"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\": rpc error: code = NotFound desc = could not find container \"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\": container with ID starting with 9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.810190 4742 scope.go:117] "RemoveContainer" containerID="992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.810402 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787"} err="failed to get container status \"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\": rpc error: code = NotFound desc = could not find container \"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\": container with ID starting with 992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.810420 4742 scope.go:117] "RemoveContainer" containerID="c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.810625 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689"} err="failed to get container status \"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\": rpc error: code = NotFound desc = could not find container \"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\": container with ID starting with c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.810649 4742 scope.go:117] "RemoveContainer" containerID="7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.810847 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d"} err="failed to get container status \"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\": rpc error: code = NotFound desc = could not find container \"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\": container with ID starting with 7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.810869 4742 scope.go:117] "RemoveContainer" containerID="00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.811074 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a"} err="failed to get container status \"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\": rpc error: code = NotFound desc = could not find 
container \"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\": container with ID starting with 00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.811097 4742 scope.go:117] "RemoveContainer" containerID="4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.811394 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5"} err="failed to get container status \"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\": rpc error: code = NotFound desc = could not find container \"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\": container with ID starting with 4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.811414 4742 scope.go:117] "RemoveContainer" containerID="4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.811770 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81"} err="failed to get container status \"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81\": rpc error: code = NotFound desc = could not find container \"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81\": container with ID starting with 4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.811789 4742 scope.go:117] "RemoveContainer" containerID="d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.811981 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251"} err="failed to get container status \"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\": rpc error: code = NotFound desc = could not find container \"d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251\": container with ID starting with d8f102704dc40fbeacb3f714997407b1d96dac763889a146daea6f83b360e251 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.811997 4742 scope.go:117] "RemoveContainer" containerID="a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.812166 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1"} err="failed to get container status \"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\": rpc error: code = NotFound desc = could not find container \"a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1\": container with ID starting with a5e2d95ee2d77519000855aa548e176a01166ae77785488a8ad6df4f0c4f34f1 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.812184 4742 scope.go:117] "RemoveContainer" containerID="de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.812437 4742 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10"} err="failed to get container status \"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\": rpc error: code = NotFound desc = could not find container \"de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10\": container with ID starting with de6d272fcb9b4d3b9b5166faa79225f1d061d37d311eb52ce9864994106a6e10 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.812453 4742 scope.go:117] "RemoveContainer" containerID="9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.812731 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409"} err="failed to get container status \"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\": rpc error: code = NotFound desc = could not find container \"9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409\": container with ID starting with 9915469014b69078df3e8ca8f32e452b50abc011b03ea5424c681b2e29b08409 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.812747 4742 scope.go:117] "RemoveContainer" containerID="992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.812996 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787"} err="failed to get container status \"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\": rpc error: code = NotFound desc = could not find container \"992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787\": container with ID starting with 992b664c237121a80551d21d7b7f8d53d5f71499479445651b9e55485cb92787 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.813021 4742 scope.go:117] "RemoveContainer" containerID="c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.813392 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689"} err="failed to get container status \"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\": rpc error: code = NotFound desc = could not find container \"c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689\": container with ID starting with c9dac31a10e6c9f2200aacf376bc19b506b93db4271a1318b32021de2cfc1689 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.813416 4742 scope.go:117] "RemoveContainer" containerID="7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.813614 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d"} err="failed to get container status \"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\": rpc error: code = NotFound desc = could not find container \"7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d\": container with ID starting with 
7c50181d6e4051cd912f0a7bccaa92c2fb35cdead12f2c0be28522aecfe70e7d not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.813636 4742 scope.go:117] "RemoveContainer" containerID="00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.813845 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a"} err="failed to get container status \"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\": rpc error: code = NotFound desc = could not find container \"00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a\": container with ID starting with 00ed60d7c23397c2e43681973cd60f2703051203b15f9905975a5076b9c1cd6a not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.813866 4742 scope.go:117] "RemoveContainer" containerID="4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.814227 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5"} err="failed to get container status \"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\": rpc error: code = NotFound desc = could not find container \"4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5\": container with ID starting with 4fda38803d37aa33b8d45a8cdcc2ceffc4d685cfb75b9fdad338ea425a8f75e5 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.814252 4742 scope.go:117] "RemoveContainer" containerID="4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.814467 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81"} err="failed to get container status \"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81\": rpc error: code = NotFound desc = could not find container \"4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81\": container with ID starting with 4e7bbd2294beccc542ec1550b4f56f4eed968d6677e358c6ecbe3403a80b4c81 not found: ID does not exist" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.815640 4742 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tkmd5_crc-storage_1a34f94f-28c8-4d9a-a52f-675e29d7bcb9_0(a69456745cc1976c2c11e594a456c944f97edf8f389ae888175592ccd3c99e3c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.815685 4742 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tkmd5_crc-storage_1a34f94f-28c8-4d9a-a52f-675e29d7bcb9_0(a69456745cc1976c2c11e594a456c944f97edf8f389ae888175592ccd3c99e3c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.815706 4742 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tkmd5_crc-storage_1a34f94f-28c8-4d9a-a52f-675e29d7bcb9_0(a69456745cc1976c2c11e594a456c944f97edf8f389ae888175592ccd3c99e3c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:03:59 crc kubenswrapper[4742]: E1205 06:03:59.815747 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-tkmd5_crc-storage(1a34f94f-28c8-4d9a-a52f-675e29d7bcb9)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-tkmd5_crc-storage(1a34f94f-28c8-4d9a-a52f-675e29d7bcb9)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tkmd5_crc-storage_1a34f94f-28c8-4d9a-a52f-675e29d7bcb9_0(a69456745cc1976c2c11e594a456c944f97edf8f389ae888175592ccd3c99e3c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-tkmd5" podUID="1a34f94f-28c8-4d9a-a52f-675e29d7bcb9" Dec 05 06:03:59 crc kubenswrapper[4742]: I1205 06:03:59.846033 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:04:00 crc kubenswrapper[4742]: I1205 06:04:00.391663 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06ddc689-50f2-409f-9ac8-8f6a1bed0831" path="/var/lib/kubelet/pods/06ddc689-50f2-409f-9ac8-8f6a1bed0831/volumes" Dec 05 06:04:00 crc kubenswrapper[4742]: I1205 06:04:00.620969 4742 generic.go:334] "Generic (PLEG): container finished" podID="fbfe7bce-dbcc-4cef-9562-f260c95eead1" containerID="54743b06b52ed821655f106c5ecb60b60272a54728fb9a2cec91403a4d75d028" exitCode=0 Dec 05 06:04:00 crc kubenswrapper[4742]: I1205 06:04:00.621120 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" event={"ID":"fbfe7bce-dbcc-4cef-9562-f260c95eead1","Type":"ContainerDied","Data":"54743b06b52ed821655f106c5ecb60b60272a54728fb9a2cec91403a4d75d028"} Dec 05 06:04:00 crc kubenswrapper[4742]: I1205 06:04:00.621182 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" event={"ID":"fbfe7bce-dbcc-4cef-9562-f260c95eead1","Type":"ContainerStarted","Data":"a4416527ff407e2898a4af0ae2cd703b738dbeeabfba32326fd9699a5c8e4795"} Dec 05 06:04:01 crc kubenswrapper[4742]: I1205 06:04:01.634777 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" event={"ID":"fbfe7bce-dbcc-4cef-9562-f260c95eead1","Type":"ContainerStarted","Data":"20ec30558b3034541989dd42fa39243e9fae2954a773d45e42694407163c5a6b"} Dec 05 06:04:01 crc kubenswrapper[4742]: I1205 06:04:01.635160 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" event={"ID":"fbfe7bce-dbcc-4cef-9562-f260c95eead1","Type":"ContainerStarted","Data":"effaa6d313f80976c6811582ec805c02dbc7baf5445af6e947d5e8cc35013512"} Dec 05 06:04:01 crc kubenswrapper[4742]: I1205 06:04:01.635177 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" 
event={"ID":"fbfe7bce-dbcc-4cef-9562-f260c95eead1","Type":"ContainerStarted","Data":"c45430221d8729fcaca620496366d1ce9d3b6ee01bac87e8f00a6d8610fbded6"} Dec 05 06:04:01 crc kubenswrapper[4742]: I1205 06:04:01.635190 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" event={"ID":"fbfe7bce-dbcc-4cef-9562-f260c95eead1","Type":"ContainerStarted","Data":"87034eaf1f9e1c4cb77dc6ed84c648592fa5dc75e08e440224d129100443ddf1"} Dec 05 06:04:01 crc kubenswrapper[4742]: I1205 06:04:01.635203 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" event={"ID":"fbfe7bce-dbcc-4cef-9562-f260c95eead1","Type":"ContainerStarted","Data":"d3e706d20e575da12d3bb64c12f34e5a122210bf72b1fcc0ea0e239613190fe6"} Dec 05 06:04:01 crc kubenswrapper[4742]: I1205 06:04:01.635217 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" event={"ID":"fbfe7bce-dbcc-4cef-9562-f260c95eead1","Type":"ContainerStarted","Data":"0d62297c261318afb72cac073bf58815fc16ec13121e2e1d95ddf5451f19d250"} Dec 05 06:04:04 crc kubenswrapper[4742]: I1205 06:04:04.664411 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" event={"ID":"fbfe7bce-dbcc-4cef-9562-f260c95eead1","Type":"ContainerStarted","Data":"f05f5b3fca27f3d5b2b222ae9351da6dde6e2daff5dda3660c056093623a842a"} Dec 05 06:04:04 crc kubenswrapper[4742]: I1205 06:04:04.695599 4742 scope.go:117] "RemoveContainer" containerID="c8018f3950c937efad0c3cafc0ce7a20baefa32c9176d2de69397d16610bf422" Dec 05 06:04:05 crc kubenswrapper[4742]: I1205 06:04:05.674969 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-776bt_39641a18-5d13-441f-9956-3777b9f27703/kube-multus/2.log" Dec 05 06:04:06 crc kubenswrapper[4742]: I1205 06:04:06.682357 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" event={"ID":"fbfe7bce-dbcc-4cef-9562-f260c95eead1","Type":"ContainerStarted","Data":"baea1309c8f2185954b66a3228cd90ef3ad2c7f4fd4004edeba1ed42ed2ae430"} Dec 05 06:04:06 crc kubenswrapper[4742]: I1205 06:04:06.683628 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:04:06 crc kubenswrapper[4742]: I1205 06:04:06.683650 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:04:06 crc kubenswrapper[4742]: I1205 06:04:06.683687 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:04:06 crc kubenswrapper[4742]: I1205 06:04:06.712453 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" podStartSLOduration=7.712434401 podStartE2EDuration="7.712434401s" podCreationTimestamp="2025-12-05 06:03:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:04:06.708360664 +0000 UTC m=+722.620495746" watchObservedRunningTime="2025-12-05 06:04:06.712434401 +0000 UTC m=+722.624569473" Dec 05 06:04:06 crc kubenswrapper[4742]: I1205 06:04:06.718105 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:04:06 crc kubenswrapper[4742]: I1205 06:04:06.718189 4742 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:04:06 crc kubenswrapper[4742]: I1205 06:04:06.974689 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-tkmd5"] Dec 05 06:04:06 crc kubenswrapper[4742]: I1205 06:04:06.975211 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:04:06 crc kubenswrapper[4742]: I1205 06:04:06.975760 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:04:07 crc kubenswrapper[4742]: E1205 06:04:07.001400 4742 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tkmd5_crc-storage_1a34f94f-28c8-4d9a-a52f-675e29d7bcb9_0(3a741b62da6857e285732eaa845a5e84436c4ee0717560a9426eabb164a019e6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 06:04:07 crc kubenswrapper[4742]: E1205 06:04:07.001494 4742 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tkmd5_crc-storage_1a34f94f-28c8-4d9a-a52f-675e29d7bcb9_0(3a741b62da6857e285732eaa845a5e84436c4ee0717560a9426eabb164a019e6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:04:07 crc kubenswrapper[4742]: E1205 06:04:07.001527 4742 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tkmd5_crc-storage_1a34f94f-28c8-4d9a-a52f-675e29d7bcb9_0(3a741b62da6857e285732eaa845a5e84436c4ee0717560a9426eabb164a019e6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:04:07 crc kubenswrapper[4742]: E1205 06:04:07.001590 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-tkmd5_crc-storage(1a34f94f-28c8-4d9a-a52f-675e29d7bcb9)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-tkmd5_crc-storage(1a34f94f-28c8-4d9a-a52f-675e29d7bcb9)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-tkmd5_crc-storage_1a34f94f-28c8-4d9a-a52f-675e29d7bcb9_0(3a741b62da6857e285732eaa845a5e84436c4ee0717560a9426eabb164a019e6): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="crc-storage/crc-storage-crc-tkmd5" podUID="1a34f94f-28c8-4d9a-a52f-675e29d7bcb9" Dec 05 06:04:14 crc kubenswrapper[4742]: I1205 06:04:14.390384 4742 scope.go:117] "RemoveContainer" containerID="27d377e2c76cba7de8c7c932e8375753cd96d8de8cb46c0a87705032bf8934b1" Dec 05 06:04:15 crc kubenswrapper[4742]: I1205 06:04:15.750227 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-776bt_39641a18-5d13-441f-9956-3777b9f27703/kube-multus/2.log" Dec 05 06:04:15 crc kubenswrapper[4742]: I1205 06:04:15.750603 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-776bt" event={"ID":"39641a18-5d13-441f-9956-3777b9f27703","Type":"ContainerStarted","Data":"826933943a2f28d75912a13ee17fa92808cb30113091642bf1a61d9afc72a165"} Dec 05 06:04:16 crc kubenswrapper[4742]: I1205 06:04:16.671834 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:04:16 crc kubenswrapper[4742]: I1205 06:04:16.671904 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:04:22 crc kubenswrapper[4742]: I1205 06:04:22.382868 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:04:22 crc kubenswrapper[4742]: I1205 06:04:22.383654 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:04:22 crc kubenswrapper[4742]: I1205 06:04:22.842885 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-tkmd5"] Dec 05 06:04:22 crc kubenswrapper[4742]: W1205 06:04:22.851228 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a34f94f_28c8_4d9a_a52f_675e29d7bcb9.slice/crio-34ab749579aa851cc2d5bb48637767f971ada7f10d342eba746c19fbc40459c5 WatchSource:0}: Error finding container 34ab749579aa851cc2d5bb48637767f971ada7f10d342eba746c19fbc40459c5: Status 404 returned error can't find the container with id 34ab749579aa851cc2d5bb48637767f971ada7f10d342eba746c19fbc40459c5 Dec 05 06:04:22 crc kubenswrapper[4742]: I1205 06:04:22.854258 4742 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 06:04:23 crc kubenswrapper[4742]: I1205 06:04:23.804518 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-tkmd5" event={"ID":"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9","Type":"ContainerStarted","Data":"34ab749579aa851cc2d5bb48637767f971ada7f10d342eba746c19fbc40459c5"} Dec 05 06:04:25 crc kubenswrapper[4742]: I1205 06:04:25.818913 4742 generic.go:334] "Generic (PLEG): container finished" podID="1a34f94f-28c8-4d9a-a52f-675e29d7bcb9" containerID="f0928c82701a9e8552c1567e8b892cc962421838b95273f051da612ae20904c3" exitCode=0 Dec 05 06:04:25 crc kubenswrapper[4742]: I1205 06:04:25.819031 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-tkmd5" event={"ID":"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9","Type":"ContainerDied","Data":"f0928c82701a9e8552c1567e8b892cc962421838b95273f051da612ae20904c3"} Dec 05 06:04:27 crc kubenswrapper[4742]: I1205 06:04:27.083953 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:04:27 crc kubenswrapper[4742]: I1205 06:04:27.262289 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzthc\" (UniqueName: \"kubernetes.io/projected/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-kube-api-access-xzthc\") pod \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\" (UID: \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\") " Dec 05 06:04:27 crc kubenswrapper[4742]: I1205 06:04:27.262430 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-crc-storage\") pod \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\" (UID: \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\") " Dec 05 06:04:27 crc kubenswrapper[4742]: I1205 06:04:27.262491 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-node-mnt\") pod \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\" (UID: \"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9\") " Dec 05 06:04:27 crc kubenswrapper[4742]: I1205 06:04:27.262802 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "1a34f94f-28c8-4d9a-a52f-675e29d7bcb9" (UID: "1a34f94f-28c8-4d9a-a52f-675e29d7bcb9"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:04:27 crc kubenswrapper[4742]: I1205 06:04:27.263150 4742 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-node-mnt\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:27 crc kubenswrapper[4742]: I1205 06:04:27.267955 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-kube-api-access-xzthc" (OuterVolumeSpecName: "kube-api-access-xzthc") pod "1a34f94f-28c8-4d9a-a52f-675e29d7bcb9" (UID: "1a34f94f-28c8-4d9a-a52f-675e29d7bcb9"). InnerVolumeSpecName "kube-api-access-xzthc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:04:27 crc kubenswrapper[4742]: I1205 06:04:27.276415 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "1a34f94f-28c8-4d9a-a52f-675e29d7bcb9" (UID: "1a34f94f-28c8-4d9a-a52f-675e29d7bcb9"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:04:27 crc kubenswrapper[4742]: I1205 06:04:27.364830 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzthc\" (UniqueName: \"kubernetes.io/projected/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-kube-api-access-xzthc\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:27 crc kubenswrapper[4742]: I1205 06:04:27.364885 4742 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1a34f94f-28c8-4d9a-a52f-675e29d7bcb9-crc-storage\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:27 crc kubenswrapper[4742]: I1205 06:04:27.834434 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-tkmd5" event={"ID":"1a34f94f-28c8-4d9a-a52f-675e29d7bcb9","Type":"ContainerDied","Data":"34ab749579aa851cc2d5bb48637767f971ada7f10d342eba746c19fbc40459c5"} Dec 05 06:04:27 crc kubenswrapper[4742]: I1205 06:04:27.834492 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34ab749579aa851cc2d5bb48637767f971ada7f10d342eba746c19fbc40459c5" Dec 05 06:04:27 crc kubenswrapper[4742]: I1205 06:04:27.834513 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-tkmd5" Dec 05 06:04:29 crc kubenswrapper[4742]: I1205 06:04:29.881546 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zbwkg" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.030364 4742 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.541575 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4"] Dec 05 06:04:34 crc kubenswrapper[4742]: E1205 06:04:34.541797 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a34f94f-28c8-4d9a-a52f-675e29d7bcb9" containerName="storage" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.541810 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a34f94f-28c8-4d9a-a52f-675e29d7bcb9" containerName="storage" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.541935 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a34f94f-28c8-4d9a-a52f-675e29d7bcb9" containerName="storage" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.542795 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.547912 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.556190 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4"] Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.654847 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4\" (UID: \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.654989 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4\" (UID: \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.655103 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlzmm\" (UniqueName: \"kubernetes.io/projected/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-kube-api-access-zlzmm\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4\" (UID: \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.756617 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-bundle\") pod 
\"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4\" (UID: \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.757098 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4\" (UID: \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.757286 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlzmm\" (UniqueName: \"kubernetes.io/projected/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-kube-api-access-zlzmm\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4\" (UID: \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.757551 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4\" (UID: \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.757845 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4\" (UID: \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.782985 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlzmm\" (UniqueName: \"kubernetes.io/projected/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-kube-api-access-zlzmm\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4\" (UID: \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" Dec 05 06:04:34 crc kubenswrapper[4742]: I1205 06:04:34.866879 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" Dec 05 06:04:35 crc kubenswrapper[4742]: I1205 06:04:35.187143 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4"] Dec 05 06:04:35 crc kubenswrapper[4742]: W1205 06:04:35.198387 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19fa719f_3ace_451f_ba24_3c9a3fc6bc2b.slice/crio-119a94cd88a6d1723175601960b77c8b3b0a61623e8ef190d6515a01c6c7b104 WatchSource:0}: Error finding container 119a94cd88a6d1723175601960b77c8b3b0a61623e8ef190d6515a01c6c7b104: Status 404 returned error can't find the container with id 119a94cd88a6d1723175601960b77c8b3b0a61623e8ef190d6515a01c6c7b104 Dec 05 06:04:35 crc kubenswrapper[4742]: I1205 06:04:35.885750 4742 generic.go:334] "Generic (PLEG): container finished" podID="19fa719f-3ace-451f-ba24-3c9a3fc6bc2b" containerID="05c6a175f7a46c95965530e9f1e63950c36a445ea9fe96c54f45e500107f83fc" exitCode=0 Dec 05 06:04:35 crc kubenswrapper[4742]: I1205 06:04:35.885821 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" event={"ID":"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b","Type":"ContainerDied","Data":"05c6a175f7a46c95965530e9f1e63950c36a445ea9fe96c54f45e500107f83fc"} Dec 05 06:04:35 crc kubenswrapper[4742]: I1205 06:04:35.885860 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" event={"ID":"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b","Type":"ContainerStarted","Data":"119a94cd88a6d1723175601960b77c8b3b0a61623e8ef190d6515a01c6c7b104"} Dec 05 06:04:36 crc kubenswrapper[4742]: I1205 06:04:36.906716 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7ptzj"] Dec 05 06:04:36 crc kubenswrapper[4742]: I1205 06:04:36.908339 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:36 crc kubenswrapper[4742]: I1205 06:04:36.925738 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7ptzj"] Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.092495 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gb968\" (UniqueName: \"kubernetes.io/projected/dd4736d2-5d30-4885-bebb-8806be68ccd8-kube-api-access-gb968\") pod \"redhat-operators-7ptzj\" (UID: \"dd4736d2-5d30-4885-bebb-8806be68ccd8\") " pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.092566 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd4736d2-5d30-4885-bebb-8806be68ccd8-utilities\") pod \"redhat-operators-7ptzj\" (UID: \"dd4736d2-5d30-4885-bebb-8806be68ccd8\") " pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.092635 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd4736d2-5d30-4885-bebb-8806be68ccd8-catalog-content\") pod \"redhat-operators-7ptzj\" (UID: \"dd4736d2-5d30-4885-bebb-8806be68ccd8\") " pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.193775 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd4736d2-5d30-4885-bebb-8806be68ccd8-utilities\") pod \"redhat-operators-7ptzj\" (UID: \"dd4736d2-5d30-4885-bebb-8806be68ccd8\") " pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.193829 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gb968\" (UniqueName: \"kubernetes.io/projected/dd4736d2-5d30-4885-bebb-8806be68ccd8-kube-api-access-gb968\") pod \"redhat-operators-7ptzj\" (UID: \"dd4736d2-5d30-4885-bebb-8806be68ccd8\") " pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.194278 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd4736d2-5d30-4885-bebb-8806be68ccd8-utilities\") pod \"redhat-operators-7ptzj\" (UID: \"dd4736d2-5d30-4885-bebb-8806be68ccd8\") " pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.194522 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd4736d2-5d30-4885-bebb-8806be68ccd8-catalog-content\") pod \"redhat-operators-7ptzj\" (UID: \"dd4736d2-5d30-4885-bebb-8806be68ccd8\") " pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.194994 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd4736d2-5d30-4885-bebb-8806be68ccd8-catalog-content\") pod \"redhat-operators-7ptzj\" (UID: \"dd4736d2-5d30-4885-bebb-8806be68ccd8\") " pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.218730 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gb968\" (UniqueName: \"kubernetes.io/projected/dd4736d2-5d30-4885-bebb-8806be68ccd8-kube-api-access-gb968\") pod \"redhat-operators-7ptzj\" (UID: \"dd4736d2-5d30-4885-bebb-8806be68ccd8\") " pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.228191 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.472164 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7ptzj"] Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.901153 4742 generic.go:334] "Generic (PLEG): container finished" podID="19fa719f-3ace-451f-ba24-3c9a3fc6bc2b" containerID="94a88d7f95af8ef99968e5e4771ba8e7ec843874a449c5e624178e7b43129e5f" exitCode=0 Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.901229 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" event={"ID":"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b","Type":"ContainerDied","Data":"94a88d7f95af8ef99968e5e4771ba8e7ec843874a449c5e624178e7b43129e5f"} Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.903200 4742 generic.go:334] "Generic (PLEG): container finished" podID="dd4736d2-5d30-4885-bebb-8806be68ccd8" containerID="c0a3e710b4969dda15791fd0612f5c691cdbc090b571b51b2ea9dc842bec3280" exitCode=0 Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.903242 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ptzj" event={"ID":"dd4736d2-5d30-4885-bebb-8806be68ccd8","Type":"ContainerDied","Data":"c0a3e710b4969dda15791fd0612f5c691cdbc090b571b51b2ea9dc842bec3280"} Dec 05 06:04:37 crc kubenswrapper[4742]: I1205 06:04:37.903273 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ptzj" event={"ID":"dd4736d2-5d30-4885-bebb-8806be68ccd8","Type":"ContainerStarted","Data":"feffc90dbcb72fb8c5683f16a05a14fa08fc3e74e23cbc3bf61b1e57565dddd8"} Dec 05 06:04:38 crc kubenswrapper[4742]: I1205 06:04:38.914017 4742 generic.go:334] "Generic (PLEG): container finished" podID="19fa719f-3ace-451f-ba24-3c9a3fc6bc2b" containerID="626289f95dab4ceec30475926645ac0a2fc3b9a039d71e62e16e74596d1d3b03" exitCode=0 Dec 05 06:04:38 crc kubenswrapper[4742]: I1205 06:04:38.914122 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" event={"ID":"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b","Type":"ContainerDied","Data":"626289f95dab4ceec30475926645ac0a2fc3b9a039d71e62e16e74596d1d3b03"} Dec 05 06:04:38 crc kubenswrapper[4742]: I1205 06:04:38.918205 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ptzj" event={"ID":"dd4736d2-5d30-4885-bebb-8806be68ccd8","Type":"ContainerStarted","Data":"49ed849ea9a71104009466c7fb53a9caf6f473d74712934ccbce80cf5eba4cd1"} Dec 05 06:04:39 crc kubenswrapper[4742]: I1205 06:04:39.928340 4742 generic.go:334] "Generic (PLEG): container finished" podID="dd4736d2-5d30-4885-bebb-8806be68ccd8" containerID="49ed849ea9a71104009466c7fb53a9caf6f473d74712934ccbce80cf5eba4cd1" exitCode=0 Dec 05 06:04:39 crc kubenswrapper[4742]: I1205 06:04:39.928459 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ptzj" 
event={"ID":"dd4736d2-5d30-4885-bebb-8806be68ccd8","Type":"ContainerDied","Data":"49ed849ea9a71104009466c7fb53a9caf6f473d74712934ccbce80cf5eba4cd1"} Dec 05 06:04:40 crc kubenswrapper[4742]: I1205 06:04:40.198304 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" Dec 05 06:04:40 crc kubenswrapper[4742]: I1205 06:04:40.341197 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlzmm\" (UniqueName: \"kubernetes.io/projected/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-kube-api-access-zlzmm\") pod \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\" (UID: \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\") " Dec 05 06:04:40 crc kubenswrapper[4742]: I1205 06:04:40.341316 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-bundle\") pod \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\" (UID: \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\") " Dec 05 06:04:40 crc kubenswrapper[4742]: I1205 06:04:40.341364 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-util\") pod \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\" (UID: \"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b\") " Dec 05 06:04:40 crc kubenswrapper[4742]: I1205 06:04:40.341793 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-bundle" (OuterVolumeSpecName: "bundle") pod "19fa719f-3ace-451f-ba24-3c9a3fc6bc2b" (UID: "19fa719f-3ace-451f-ba24-3c9a3fc6bc2b"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:04:40 crc kubenswrapper[4742]: I1205 06:04:40.348994 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-kube-api-access-zlzmm" (OuterVolumeSpecName: "kube-api-access-zlzmm") pod "19fa719f-3ace-451f-ba24-3c9a3fc6bc2b" (UID: "19fa719f-3ace-451f-ba24-3c9a3fc6bc2b"). InnerVolumeSpecName "kube-api-access-zlzmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:04:40 crc kubenswrapper[4742]: I1205 06:04:40.372145 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-util" (OuterVolumeSpecName: "util") pod "19fa719f-3ace-451f-ba24-3c9a3fc6bc2b" (UID: "19fa719f-3ace-451f-ba24-3c9a3fc6bc2b"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:04:40 crc kubenswrapper[4742]: I1205 06:04:40.442650 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlzmm\" (UniqueName: \"kubernetes.io/projected/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-kube-api-access-zlzmm\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:40 crc kubenswrapper[4742]: I1205 06:04:40.442914 4742 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:40 crc kubenswrapper[4742]: I1205 06:04:40.442927 4742 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/19fa719f-3ace-451f-ba24-3c9a3fc6bc2b-util\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:40 crc kubenswrapper[4742]: I1205 06:04:40.938528 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" event={"ID":"19fa719f-3ace-451f-ba24-3c9a3fc6bc2b","Type":"ContainerDied","Data":"119a94cd88a6d1723175601960b77c8b3b0a61623e8ef190d6515a01c6c7b104"} Dec 05 06:04:40 crc kubenswrapper[4742]: I1205 06:04:40.938568 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="119a94cd88a6d1723175601960b77c8b3b0a61623e8ef190d6515a01c6c7b104" Dec 05 06:04:40 crc kubenswrapper[4742]: I1205 06:04:40.938627 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4" Dec 05 06:04:40 crc kubenswrapper[4742]: I1205 06:04:40.943543 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ptzj" event={"ID":"dd4736d2-5d30-4885-bebb-8806be68ccd8","Type":"ContainerStarted","Data":"72bf89ae239209a5e7e6ae9cef50cde90064f6fa0aa34ac4a468e7a76d016c7e"} Dec 05 06:04:41 crc kubenswrapper[4742]: I1205 06:04:41.234986 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7ptzj" podStartSLOduration=2.748050176 podStartE2EDuration="5.234968838s" podCreationTimestamp="2025-12-05 06:04:36 +0000 UTC" firstStartedPulling="2025-12-05 06:04:37.904382452 +0000 UTC m=+753.816517514" lastFinishedPulling="2025-12-05 06:04:40.391301104 +0000 UTC m=+756.303436176" observedRunningTime="2025-12-05 06:04:40.979288441 +0000 UTC m=+756.891423523" watchObservedRunningTime="2025-12-05 06:04:41.234968838 +0000 UTC m=+757.147103910" Dec 05 06:04:44 crc kubenswrapper[4742]: I1205 06:04:44.884455 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-n5mqx"] Dec 05 06:04:44 crc kubenswrapper[4742]: E1205 06:04:44.884858 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19fa719f-3ace-451f-ba24-3c9a3fc6bc2b" containerName="extract" Dec 05 06:04:44 crc kubenswrapper[4742]: I1205 06:04:44.884870 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="19fa719f-3ace-451f-ba24-3c9a3fc6bc2b" containerName="extract" Dec 05 06:04:44 crc kubenswrapper[4742]: E1205 06:04:44.884878 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19fa719f-3ace-451f-ba24-3c9a3fc6bc2b" containerName="util" Dec 05 06:04:44 crc kubenswrapper[4742]: I1205 06:04:44.884884 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="19fa719f-3ace-451f-ba24-3c9a3fc6bc2b" containerName="util" Dec 05 06:04:44 crc 
kubenswrapper[4742]: E1205 06:04:44.884896 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19fa719f-3ace-451f-ba24-3c9a3fc6bc2b" containerName="pull" Dec 05 06:04:44 crc kubenswrapper[4742]: I1205 06:04:44.884902 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="19fa719f-3ace-451f-ba24-3c9a3fc6bc2b" containerName="pull" Dec 05 06:04:44 crc kubenswrapper[4742]: I1205 06:04:44.885006 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="19fa719f-3ace-451f-ba24-3c9a3fc6bc2b" containerName="extract" Dec 05 06:04:44 crc kubenswrapper[4742]: I1205 06:04:44.885390 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-n5mqx" Dec 05 06:04:44 crc kubenswrapper[4742]: I1205 06:04:44.887719 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 05 06:04:44 crc kubenswrapper[4742]: I1205 06:04:44.889263 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-78ngp" Dec 05 06:04:44 crc kubenswrapper[4742]: I1205 06:04:44.894966 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 05 06:04:44 crc kubenswrapper[4742]: I1205 06:04:44.894990 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-n5mqx"] Dec 05 06:04:45 crc kubenswrapper[4742]: I1205 06:04:45.001534 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67px9\" (UniqueName: \"kubernetes.io/projected/fc296790-fea6-441b-93fc-6e4caed21ba3-kube-api-access-67px9\") pod \"nmstate-operator-5b5b58f5c8-n5mqx\" (UID: \"fc296790-fea6-441b-93fc-6e4caed21ba3\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-n5mqx" Dec 05 06:04:45 crc kubenswrapper[4742]: I1205 06:04:45.102897 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67px9\" (UniqueName: \"kubernetes.io/projected/fc296790-fea6-441b-93fc-6e4caed21ba3-kube-api-access-67px9\") pod \"nmstate-operator-5b5b58f5c8-n5mqx\" (UID: \"fc296790-fea6-441b-93fc-6e4caed21ba3\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-n5mqx" Dec 05 06:04:45 crc kubenswrapper[4742]: I1205 06:04:45.122281 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67px9\" (UniqueName: \"kubernetes.io/projected/fc296790-fea6-441b-93fc-6e4caed21ba3-kube-api-access-67px9\") pod \"nmstate-operator-5b5b58f5c8-n5mqx\" (UID: \"fc296790-fea6-441b-93fc-6e4caed21ba3\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-n5mqx" Dec 05 06:04:45 crc kubenswrapper[4742]: I1205 06:04:45.210427 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-n5mqx" Dec 05 06:04:45 crc kubenswrapper[4742]: I1205 06:04:45.486283 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-n5mqx"] Dec 05 06:04:45 crc kubenswrapper[4742]: W1205 06:04:45.498379 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc296790_fea6_441b_93fc_6e4caed21ba3.slice/crio-73b685adfc0d96043ee0c501c7c3fa41e1a5b55aeb1bba92c5e1bd6ff12133e3 WatchSource:0}: Error finding container 73b685adfc0d96043ee0c501c7c3fa41e1a5b55aeb1bba92c5e1bd6ff12133e3: Status 404 returned error can't find the container with id 73b685adfc0d96043ee0c501c7c3fa41e1a5b55aeb1bba92c5e1bd6ff12133e3 Dec 05 06:04:45 crc kubenswrapper[4742]: I1205 06:04:45.973023 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-n5mqx" event={"ID":"fc296790-fea6-441b-93fc-6e4caed21ba3","Type":"ContainerStarted","Data":"73b685adfc0d96043ee0c501c7c3fa41e1a5b55aeb1bba92c5e1bd6ff12133e3"} Dec 05 06:04:46 crc kubenswrapper[4742]: I1205 06:04:46.671588 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:04:46 crc kubenswrapper[4742]: I1205 06:04:46.671942 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:04:47 crc kubenswrapper[4742]: I1205 06:04:47.229552 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:47 crc kubenswrapper[4742]: I1205 06:04:47.229637 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:47 crc kubenswrapper[4742]: I1205 06:04:47.278529 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:48 crc kubenswrapper[4742]: I1205 06:04:48.056435 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:48 crc kubenswrapper[4742]: I1205 06:04:48.993237 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-n5mqx" event={"ID":"fc296790-fea6-441b-93fc-6e4caed21ba3","Type":"ContainerStarted","Data":"de801d7c63eac596317f39fa8968920354b377c9322e92d5d248b14021771dfe"} Dec 05 06:04:49 crc kubenswrapper[4742]: I1205 06:04:49.022495 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-n5mqx" podStartSLOduration=2.54934066 podStartE2EDuration="5.022476711s" podCreationTimestamp="2025-12-05 06:04:44 +0000 UTC" firstStartedPulling="2025-12-05 06:04:45.502019076 +0000 UTC m=+761.414154148" lastFinishedPulling="2025-12-05 06:04:47.975155137 +0000 UTC m=+763.887290199" observedRunningTime="2025-12-05 06:04:49.017464959 +0000 UTC m=+764.929600021" watchObservedRunningTime="2025-12-05 
06:04:49.022476711 +0000 UTC m=+764.934611783" Dec 05 06:04:49 crc kubenswrapper[4742]: I1205 06:04:49.693713 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7ptzj"] Dec 05 06:04:49 crc kubenswrapper[4742]: I1205 06:04:49.998821 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7ptzj" podUID="dd4736d2-5d30-4885-bebb-8806be68ccd8" containerName="registry-server" containerID="cri-o://72bf89ae239209a5e7e6ae9cef50cde90064f6fa0aa34ac4a468e7a76d016c7e" gracePeriod=2 Dec 05 06:04:51 crc kubenswrapper[4742]: I1205 06:04:51.629467 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:51 crc kubenswrapper[4742]: I1205 06:04:51.795805 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd4736d2-5d30-4885-bebb-8806be68ccd8-catalog-content\") pod \"dd4736d2-5d30-4885-bebb-8806be68ccd8\" (UID: \"dd4736d2-5d30-4885-bebb-8806be68ccd8\") " Dec 05 06:04:51 crc kubenswrapper[4742]: I1205 06:04:51.795960 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd4736d2-5d30-4885-bebb-8806be68ccd8-utilities\") pod \"dd4736d2-5d30-4885-bebb-8806be68ccd8\" (UID: \"dd4736d2-5d30-4885-bebb-8806be68ccd8\") " Dec 05 06:04:51 crc kubenswrapper[4742]: I1205 06:04:51.796030 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gb968\" (UniqueName: \"kubernetes.io/projected/dd4736d2-5d30-4885-bebb-8806be68ccd8-kube-api-access-gb968\") pod \"dd4736d2-5d30-4885-bebb-8806be68ccd8\" (UID: \"dd4736d2-5d30-4885-bebb-8806be68ccd8\") " Dec 05 06:04:51 crc kubenswrapper[4742]: I1205 06:04:51.798239 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd4736d2-5d30-4885-bebb-8806be68ccd8-utilities" (OuterVolumeSpecName: "utilities") pod "dd4736d2-5d30-4885-bebb-8806be68ccd8" (UID: "dd4736d2-5d30-4885-bebb-8806be68ccd8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:04:51 crc kubenswrapper[4742]: I1205 06:04:51.809094 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd4736d2-5d30-4885-bebb-8806be68ccd8-kube-api-access-gb968" (OuterVolumeSpecName: "kube-api-access-gb968") pod "dd4736d2-5d30-4885-bebb-8806be68ccd8" (UID: "dd4736d2-5d30-4885-bebb-8806be68ccd8"). InnerVolumeSpecName "kube-api-access-gb968". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:04:51 crc kubenswrapper[4742]: I1205 06:04:51.897579 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd4736d2-5d30-4885-bebb-8806be68ccd8-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:51 crc kubenswrapper[4742]: I1205 06:04:51.897759 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gb968\" (UniqueName: \"kubernetes.io/projected/dd4736d2-5d30-4885-bebb-8806be68ccd8-kube-api-access-gb968\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:51 crc kubenswrapper[4742]: I1205 06:04:51.946204 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd4736d2-5d30-4885-bebb-8806be68ccd8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dd4736d2-5d30-4885-bebb-8806be68ccd8" (UID: "dd4736d2-5d30-4885-bebb-8806be68ccd8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:04:51 crc kubenswrapper[4742]: I1205 06:04:51.998914 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd4736d2-5d30-4885-bebb-8806be68ccd8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.016445 4742 generic.go:334] "Generic (PLEG): container finished" podID="dd4736d2-5d30-4885-bebb-8806be68ccd8" containerID="72bf89ae239209a5e7e6ae9cef50cde90064f6fa0aa34ac4a468e7a76d016c7e" exitCode=0 Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.016522 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ptzj" event={"ID":"dd4736d2-5d30-4885-bebb-8806be68ccd8","Type":"ContainerDied","Data":"72bf89ae239209a5e7e6ae9cef50cde90064f6fa0aa34ac4a468e7a76d016c7e"} Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.016771 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7ptzj" event={"ID":"dd4736d2-5d30-4885-bebb-8806be68ccd8","Type":"ContainerDied","Data":"feffc90dbcb72fb8c5683f16a05a14fa08fc3e74e23cbc3bf61b1e57565dddd8"} Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.016571 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7ptzj" Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.016887 4742 scope.go:117] "RemoveContainer" containerID="72bf89ae239209a5e7e6ae9cef50cde90064f6fa0aa34ac4a468e7a76d016c7e" Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.039200 4742 scope.go:117] "RemoveContainer" containerID="49ed849ea9a71104009466c7fb53a9caf6f473d74712934ccbce80cf5eba4cd1" Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.062507 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7ptzj"] Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.068093 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7ptzj"] Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.081938 4742 scope.go:117] "RemoveContainer" containerID="c0a3e710b4969dda15791fd0612f5c691cdbc090b571b51b2ea9dc842bec3280" Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.097191 4742 scope.go:117] "RemoveContainer" containerID="72bf89ae239209a5e7e6ae9cef50cde90064f6fa0aa34ac4a468e7a76d016c7e" Dec 05 06:04:52 crc kubenswrapper[4742]: E1205 06:04:52.097937 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72bf89ae239209a5e7e6ae9cef50cde90064f6fa0aa34ac4a468e7a76d016c7e\": container with ID starting with 72bf89ae239209a5e7e6ae9cef50cde90064f6fa0aa34ac4a468e7a76d016c7e not found: ID does not exist" containerID="72bf89ae239209a5e7e6ae9cef50cde90064f6fa0aa34ac4a468e7a76d016c7e" Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.097962 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72bf89ae239209a5e7e6ae9cef50cde90064f6fa0aa34ac4a468e7a76d016c7e"} err="failed to get container status \"72bf89ae239209a5e7e6ae9cef50cde90064f6fa0aa34ac4a468e7a76d016c7e\": rpc error: code = NotFound desc = could not find container \"72bf89ae239209a5e7e6ae9cef50cde90064f6fa0aa34ac4a468e7a76d016c7e\": container with ID starting with 72bf89ae239209a5e7e6ae9cef50cde90064f6fa0aa34ac4a468e7a76d016c7e not found: ID does not exist" Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.097981 4742 scope.go:117] "RemoveContainer" containerID="49ed849ea9a71104009466c7fb53a9caf6f473d74712934ccbce80cf5eba4cd1" Dec 05 06:04:52 crc kubenswrapper[4742]: E1205 06:04:52.098301 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49ed849ea9a71104009466c7fb53a9caf6f473d74712934ccbce80cf5eba4cd1\": container with ID starting with 49ed849ea9a71104009466c7fb53a9caf6f473d74712934ccbce80cf5eba4cd1 not found: ID does not exist" containerID="49ed849ea9a71104009466c7fb53a9caf6f473d74712934ccbce80cf5eba4cd1" Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.098324 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49ed849ea9a71104009466c7fb53a9caf6f473d74712934ccbce80cf5eba4cd1"} err="failed to get container status \"49ed849ea9a71104009466c7fb53a9caf6f473d74712934ccbce80cf5eba4cd1\": rpc error: code = NotFound desc = could not find container \"49ed849ea9a71104009466c7fb53a9caf6f473d74712934ccbce80cf5eba4cd1\": container with ID starting with 49ed849ea9a71104009466c7fb53a9caf6f473d74712934ccbce80cf5eba4cd1 not found: ID does not exist" Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.098337 4742 scope.go:117] "RemoveContainer" 
containerID="c0a3e710b4969dda15791fd0612f5c691cdbc090b571b51b2ea9dc842bec3280" Dec 05 06:04:52 crc kubenswrapper[4742]: E1205 06:04:52.098631 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0a3e710b4969dda15791fd0612f5c691cdbc090b571b51b2ea9dc842bec3280\": container with ID starting with c0a3e710b4969dda15791fd0612f5c691cdbc090b571b51b2ea9dc842bec3280 not found: ID does not exist" containerID="c0a3e710b4969dda15791fd0612f5c691cdbc090b571b51b2ea9dc842bec3280" Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.098649 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0a3e710b4969dda15791fd0612f5c691cdbc090b571b51b2ea9dc842bec3280"} err="failed to get container status \"c0a3e710b4969dda15791fd0612f5c691cdbc090b571b51b2ea9dc842bec3280\": rpc error: code = NotFound desc = could not find container \"c0a3e710b4969dda15791fd0612f5c691cdbc090b571b51b2ea9dc842bec3280\": container with ID starting with c0a3e710b4969dda15791fd0612f5c691cdbc090b571b51b2ea9dc842bec3280 not found: ID does not exist" Dec 05 06:04:52 crc kubenswrapper[4742]: I1205 06:04:52.393812 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd4736d2-5d30-4885-bebb-8806be68ccd8" path="/var/lib/kubelet/pods/dd4736d2-5d30-4885-bebb-8806be68ccd8/volumes" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.092569 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-4xvgt"] Dec 05 06:04:55 crc kubenswrapper[4742]: E1205 06:04:55.093246 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd4736d2-5d30-4885-bebb-8806be68ccd8" containerName="registry-server" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.093264 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd4736d2-5d30-4885-bebb-8806be68ccd8" containerName="registry-server" Dec 05 06:04:55 crc kubenswrapper[4742]: E1205 06:04:55.093282 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd4736d2-5d30-4885-bebb-8806be68ccd8" containerName="extract-utilities" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.093290 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd4736d2-5d30-4885-bebb-8806be68ccd8" containerName="extract-utilities" Dec 05 06:04:55 crc kubenswrapper[4742]: E1205 06:04:55.093305 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd4736d2-5d30-4885-bebb-8806be68ccd8" containerName="extract-content" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.093316 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd4736d2-5d30-4885-bebb-8806be68ccd8" containerName="extract-content" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.093509 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd4736d2-5d30-4885-bebb-8806be68ccd8" containerName="registry-server" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.094370 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4xvgt" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.100462 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr"] Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.101673 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.115539 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.119214 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-lkpfd" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.133540 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-4xvgt"] Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.150099 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr"] Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.209106 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-l8d88"] Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.210280 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.270349 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvjlj\" (UniqueName: \"kubernetes.io/projected/35a6361c-11cd-440d-ad6e-93929d21e8f2-kube-api-access-wvjlj\") pod \"nmstate-webhook-5f6d4c5ccb-7zxqr\" (UID: \"35a6361c-11cd-440d-ad6e-93929d21e8f2\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.270404 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwkhj\" (UniqueName: \"kubernetes.io/projected/e78be74b-15df-44da-9b82-909e008442b0-kube-api-access-zwkhj\") pod \"nmstate-metrics-7f946cbc9-4xvgt\" (UID: \"e78be74b-15df-44da-9b82-909e008442b0\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4xvgt" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.270427 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/35a6361c-11cd-440d-ad6e-93929d21e8f2-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-7zxqr\" (UID: \"35a6361c-11cd-440d-ad6e-93929d21e8f2\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.296224 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs"] Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.296805 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.298277 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.298691 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-bdpjd" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.298846 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.306476 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs"] Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.371141 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsdvv\" (UniqueName: \"kubernetes.io/projected/dc1bcd64-15c2-4fec-ac72-167371e50892-kube-api-access-lsdvv\") pod \"nmstate-handler-l8d88\" (UID: \"dc1bcd64-15c2-4fec-ac72-167371e50892\") " pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.371175 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/dc1bcd64-15c2-4fec-ac72-167371e50892-dbus-socket\") pod \"nmstate-handler-l8d88\" (UID: \"dc1bcd64-15c2-4fec-ac72-167371e50892\") " pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.371199 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwkhj\" (UniqueName: \"kubernetes.io/projected/e78be74b-15df-44da-9b82-909e008442b0-kube-api-access-zwkhj\") pod \"nmstate-metrics-7f946cbc9-4xvgt\" (UID: \"e78be74b-15df-44da-9b82-909e008442b0\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4xvgt" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.371214 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/dc1bcd64-15c2-4fec-ac72-167371e50892-ovs-socket\") pod \"nmstate-handler-l8d88\" (UID: \"dc1bcd64-15c2-4fec-ac72-167371e50892\") " pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.371233 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/dc1bcd64-15c2-4fec-ac72-167371e50892-nmstate-lock\") pod \"nmstate-handler-l8d88\" (UID: \"dc1bcd64-15c2-4fec-ac72-167371e50892\") " pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.371249 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/35a6361c-11cd-440d-ad6e-93929d21e8f2-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-7zxqr\" (UID: \"35a6361c-11cd-440d-ad6e-93929d21e8f2\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.371308 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvjlj\" (UniqueName: \"kubernetes.io/projected/35a6361c-11cd-440d-ad6e-93929d21e8f2-kube-api-access-wvjlj\") pod \"nmstate-webhook-5f6d4c5ccb-7zxqr\" (UID: 
\"35a6361c-11cd-440d-ad6e-93929d21e8f2\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.375609 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/35a6361c-11cd-440d-ad6e-93929d21e8f2-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-7zxqr\" (UID: \"35a6361c-11cd-440d-ad6e-93929d21e8f2\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.402475 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvjlj\" (UniqueName: \"kubernetes.io/projected/35a6361c-11cd-440d-ad6e-93929d21e8f2-kube-api-access-wvjlj\") pod \"nmstate-webhook-5f6d4c5ccb-7zxqr\" (UID: \"35a6361c-11cd-440d-ad6e-93929d21e8f2\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.410636 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwkhj\" (UniqueName: \"kubernetes.io/projected/e78be74b-15df-44da-9b82-909e008442b0-kube-api-access-zwkhj\") pod \"nmstate-metrics-7f946cbc9-4xvgt\" (UID: \"e78be74b-15df-44da-9b82-909e008442b0\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4xvgt" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.411525 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4xvgt" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.422511 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.473595 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsdvv\" (UniqueName: \"kubernetes.io/projected/dc1bcd64-15c2-4fec-ac72-167371e50892-kube-api-access-lsdvv\") pod \"nmstate-handler-l8d88\" (UID: \"dc1bcd64-15c2-4fec-ac72-167371e50892\") " pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.481436 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/dc1bcd64-15c2-4fec-ac72-167371e50892-dbus-socket\") pod \"nmstate-handler-l8d88\" (UID: \"dc1bcd64-15c2-4fec-ac72-167371e50892\") " pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.481483 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/dc1bcd64-15c2-4fec-ac72-167371e50892-ovs-socket\") pod \"nmstate-handler-l8d88\" (UID: \"dc1bcd64-15c2-4fec-ac72-167371e50892\") " pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.481510 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/dc1bcd64-15c2-4fec-ac72-167371e50892-nmstate-lock\") pod \"nmstate-handler-l8d88\" (UID: \"dc1bcd64-15c2-4fec-ac72-167371e50892\") " pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.481602 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b2b02f02-8470-4c8a-9f75-774905266432-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-jsbxs\" (UID: 
\"b2b02f02-8470-4c8a-9f75-774905266432\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.481634 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tqsm\" (UniqueName: \"kubernetes.io/projected/b2b02f02-8470-4c8a-9f75-774905266432-kube-api-access-4tqsm\") pod \"nmstate-console-plugin-7fbb5f6569-jsbxs\" (UID: \"b2b02f02-8470-4c8a-9f75-774905266432\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.481661 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b2b02f02-8470-4c8a-9f75-774905266432-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-jsbxs\" (UID: \"b2b02f02-8470-4c8a-9f75-774905266432\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.482111 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/dc1bcd64-15c2-4fec-ac72-167371e50892-dbus-socket\") pod \"nmstate-handler-l8d88\" (UID: \"dc1bcd64-15c2-4fec-ac72-167371e50892\") " pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.482147 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/dc1bcd64-15c2-4fec-ac72-167371e50892-ovs-socket\") pod \"nmstate-handler-l8d88\" (UID: \"dc1bcd64-15c2-4fec-ac72-167371e50892\") " pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.482168 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/dc1bcd64-15c2-4fec-ac72-167371e50892-nmstate-lock\") pod \"nmstate-handler-l8d88\" (UID: \"dc1bcd64-15c2-4fec-ac72-167371e50892\") " pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.492751 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsdvv\" (UniqueName: \"kubernetes.io/projected/dc1bcd64-15c2-4fec-ac72-167371e50892-kube-api-access-lsdvv\") pod \"nmstate-handler-l8d88\" (UID: \"dc1bcd64-15c2-4fec-ac72-167371e50892\") " pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.514871 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-bcf7ddbbf-wzwnk"] Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.515517 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.537519 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-bcf7ddbbf-wzwnk"] Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.554735 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.583770 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/da0fe601-a8f3-44cc-bf82-ce46714ecd05-trusted-ca-bundle\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.583816 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/da0fe601-a8f3-44cc-bf82-ce46714ecd05-console-config\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.583864 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/da0fe601-a8f3-44cc-bf82-ce46714ecd05-oauth-serving-cert\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.583955 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/da0fe601-a8f3-44cc-bf82-ce46714ecd05-console-serving-cert\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.584020 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/da0fe601-a8f3-44cc-bf82-ce46714ecd05-service-ca\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.584049 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nx2qp\" (UniqueName: \"kubernetes.io/projected/da0fe601-a8f3-44cc-bf82-ce46714ecd05-kube-api-access-nx2qp\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.584137 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b2b02f02-8470-4c8a-9f75-774905266432-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-jsbxs\" (UID: \"b2b02f02-8470-4c8a-9f75-774905266432\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.584170 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tqsm\" (UniqueName: \"kubernetes.io/projected/b2b02f02-8470-4c8a-9f75-774905266432-kube-api-access-4tqsm\") pod \"nmstate-console-plugin-7fbb5f6569-jsbxs\" (UID: \"b2b02f02-8470-4c8a-9f75-774905266432\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.584195 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/da0fe601-a8f3-44cc-bf82-ce46714ecd05-console-oauth-config\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.584226 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b2b02f02-8470-4c8a-9f75-774905266432-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-jsbxs\" (UID: \"b2b02f02-8470-4c8a-9f75-774905266432\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.584922 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/b2b02f02-8470-4c8a-9f75-774905266432-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-jsbxs\" (UID: \"b2b02f02-8470-4c8a-9f75-774905266432\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.587720 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/b2b02f02-8470-4c8a-9f75-774905266432-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-jsbxs\" (UID: \"b2b02f02-8470-4c8a-9f75-774905266432\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.601684 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tqsm\" (UniqueName: \"kubernetes.io/projected/b2b02f02-8470-4c8a-9f75-774905266432-kube-api-access-4tqsm\") pod \"nmstate-console-plugin-7fbb5f6569-jsbxs\" (UID: \"b2b02f02-8470-4c8a-9f75-774905266432\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.612468 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.685947 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/da0fe601-a8f3-44cc-bf82-ce46714ecd05-console-oauth-config\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.686033 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/da0fe601-a8f3-44cc-bf82-ce46714ecd05-trusted-ca-bundle\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.686088 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/da0fe601-a8f3-44cc-bf82-ce46714ecd05-console-config\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.686119 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/da0fe601-a8f3-44cc-bf82-ce46714ecd05-oauth-serving-cert\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.686142 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/da0fe601-a8f3-44cc-bf82-ce46714ecd05-console-serving-cert\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.686158 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/da0fe601-a8f3-44cc-bf82-ce46714ecd05-service-ca\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.686174 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nx2qp\" (UniqueName: \"kubernetes.io/projected/da0fe601-a8f3-44cc-bf82-ce46714ecd05-kube-api-access-nx2qp\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.690617 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/da0fe601-a8f3-44cc-bf82-ce46714ecd05-trusted-ca-bundle\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.691512 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/da0fe601-a8f3-44cc-bf82-ce46714ecd05-service-ca\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " 
pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.691538 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/da0fe601-a8f3-44cc-bf82-ce46714ecd05-oauth-serving-cert\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.691994 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/da0fe601-a8f3-44cc-bf82-ce46714ecd05-console-oauth-config\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.692365 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/da0fe601-a8f3-44cc-bf82-ce46714ecd05-console-config\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.693336 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/da0fe601-a8f3-44cc-bf82-ce46714ecd05-console-serving-cert\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.700627 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nx2qp\" (UniqueName: \"kubernetes.io/projected/da0fe601-a8f3-44cc-bf82-ce46714ecd05-kube-api-access-nx2qp\") pod \"console-bcf7ddbbf-wzwnk\" (UID: \"da0fe601-a8f3-44cc-bf82-ce46714ecd05\") " pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.728225 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr"] Dec 05 06:04:55 crc kubenswrapper[4742]: W1205 06:04:55.736913 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35a6361c_11cd_440d_ad6e_93929d21e8f2.slice/crio-9c9d8ecb525b098dde5d0395790219ff63217fca5573d0b2d175df24c2ff2cb6 WatchSource:0}: Error finding container 9c9d8ecb525b098dde5d0395790219ff63217fca5573d0b2d175df24c2ff2cb6: Status 404 returned error can't find the container with id 9c9d8ecb525b098dde5d0395790219ff63217fca5573d0b2d175df24c2ff2cb6 Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.811757 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs"] Dec 05 06:04:55 crc kubenswrapper[4742]: W1205 06:04:55.817203 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2b02f02_8470_4c8a_9f75_774905266432.slice/crio-8a4f08154acf5752f64663eeefeb88d77a48c907b46ab857f8074a8bf085cfd8 WatchSource:0}: Error finding container 8a4f08154acf5752f64663eeefeb88d77a48c907b46ab857f8074a8bf085cfd8: Status 404 returned error can't find the container with id 8a4f08154acf5752f64663eeefeb88d77a48c907b46ab857f8074a8bf085cfd8 Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.849861 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:04:55 crc kubenswrapper[4742]: I1205 06:04:55.885497 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-4xvgt"] Dec 05 06:04:55 crc kubenswrapper[4742]: W1205 06:04:55.892011 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode78be74b_15df_44da_9b82_909e008442b0.slice/crio-7dea950e16442dd8674a8b22066833bfa9464a4440e6ac012c7460474d0fa02a WatchSource:0}: Error finding container 7dea950e16442dd8674a8b22066833bfa9464a4440e6ac012c7460474d0fa02a: Status 404 returned error can't find the container with id 7dea950e16442dd8674a8b22066833bfa9464a4440e6ac012c7460474d0fa02a Dec 05 06:04:56 crc kubenswrapper[4742]: I1205 06:04:56.056217 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs" event={"ID":"b2b02f02-8470-4c8a-9f75-774905266432","Type":"ContainerStarted","Data":"8a4f08154acf5752f64663eeefeb88d77a48c907b46ab857f8074a8bf085cfd8"} Dec 05 06:04:56 crc kubenswrapper[4742]: I1205 06:04:56.057666 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-l8d88" event={"ID":"dc1bcd64-15c2-4fec-ac72-167371e50892","Type":"ContainerStarted","Data":"96e7ef5f25cd928c9b103f1e109f61e0785594c824a26883e740cab2e71e4634"} Dec 05 06:04:56 crc kubenswrapper[4742]: I1205 06:04:56.058769 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr" event={"ID":"35a6361c-11cd-440d-ad6e-93929d21e8f2","Type":"ContainerStarted","Data":"9c9d8ecb525b098dde5d0395790219ff63217fca5573d0b2d175df24c2ff2cb6"} Dec 05 06:04:56 crc kubenswrapper[4742]: I1205 06:04:56.059720 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4xvgt" event={"ID":"e78be74b-15df-44da-9b82-909e008442b0","Type":"ContainerStarted","Data":"7dea950e16442dd8674a8b22066833bfa9464a4440e6ac012c7460474d0fa02a"} Dec 05 06:04:56 crc kubenswrapper[4742]: I1205 06:04:56.243661 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-bcf7ddbbf-wzwnk"] Dec 05 06:04:56 crc kubenswrapper[4742]: W1205 06:04:56.247449 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda0fe601_a8f3_44cc_bf82_ce46714ecd05.slice/crio-242bbd0072e7453e5662e26e0b249052b2c83501ef5f789947891d6e41d208a5 WatchSource:0}: Error finding container 242bbd0072e7453e5662e26e0b249052b2c83501ef5f789947891d6e41d208a5: Status 404 returned error can't find the container with id 242bbd0072e7453e5662e26e0b249052b2c83501ef5f789947891d6e41d208a5 Dec 05 06:04:57 crc kubenswrapper[4742]: I1205 06:04:57.067219 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-bcf7ddbbf-wzwnk" event={"ID":"da0fe601-a8f3-44cc-bf82-ce46714ecd05","Type":"ContainerStarted","Data":"a610bdaa9b8d607cca543df7f91877f7050b899436f518318228a2544f2807d6"} Dec 05 06:04:57 crc kubenswrapper[4742]: I1205 06:04:57.067558 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-bcf7ddbbf-wzwnk" event={"ID":"da0fe601-a8f3-44cc-bf82-ce46714ecd05","Type":"ContainerStarted","Data":"242bbd0072e7453e5662e26e0b249052b2c83501ef5f789947891d6e41d208a5"} Dec 05 06:04:57 crc kubenswrapper[4742]: I1205 06:04:57.089331 4742 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-console/console-bcf7ddbbf-wzwnk" podStartSLOduration=2.089315222 podStartE2EDuration="2.089315222s" podCreationTimestamp="2025-12-05 06:04:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:04:57.085641795 +0000 UTC m=+772.997776887" watchObservedRunningTime="2025-12-05 06:04:57.089315222 +0000 UTC m=+773.001450284" Dec 05 06:04:59 crc kubenswrapper[4742]: I1205 06:04:59.081572 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-l8d88" event={"ID":"dc1bcd64-15c2-4fec-ac72-167371e50892","Type":"ContainerStarted","Data":"13566ae845248a7f50ea7a43b61d8ade5c57399e746caf717b8902332d91af40"} Dec 05 06:04:59 crc kubenswrapper[4742]: I1205 06:04:59.082338 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:04:59 crc kubenswrapper[4742]: I1205 06:04:59.086395 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr" event={"ID":"35a6361c-11cd-440d-ad6e-93929d21e8f2","Type":"ContainerStarted","Data":"87a3b050fbf20af1c87af0b6fb00d2014d21375519d8337f4e8ae0b1f285ba3c"} Dec 05 06:04:59 crc kubenswrapper[4742]: I1205 06:04:59.086663 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr" Dec 05 06:04:59 crc kubenswrapper[4742]: I1205 06:04:59.089215 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4xvgt" event={"ID":"e78be74b-15df-44da-9b82-909e008442b0","Type":"ContainerStarted","Data":"0935807c79074788ad131cc635122a8b5a97f26d3ade508b56d69dd624572e36"} Dec 05 06:04:59 crc kubenswrapper[4742]: I1205 06:04:59.091651 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs" event={"ID":"b2b02f02-8470-4c8a-9f75-774905266432","Type":"ContainerStarted","Data":"c2d35b89e488b9d2950efe43ae95019ca97431a2416ac56aed655cbbc0775a99"} Dec 05 06:04:59 crc kubenswrapper[4742]: I1205 06:04:59.107239 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-l8d88" podStartSLOduration=1.307809872 podStartE2EDuration="4.107206663s" podCreationTimestamp="2025-12-05 06:04:55 +0000 UTC" firstStartedPulling="2025-12-05 06:04:55.590453756 +0000 UTC m=+771.502588818" lastFinishedPulling="2025-12-05 06:04:58.389850507 +0000 UTC m=+774.301985609" observedRunningTime="2025-12-05 06:04:59.103906456 +0000 UTC m=+775.016041558" watchObservedRunningTime="2025-12-05 06:04:59.107206663 +0000 UTC m=+775.019341815" Dec 05 06:04:59 crc kubenswrapper[4742]: I1205 06:04:59.140592 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-jsbxs" podStartSLOduration=1.577959618 podStartE2EDuration="4.140562989s" podCreationTimestamp="2025-12-05 06:04:55 +0000 UTC" firstStartedPulling="2025-12-05 06:04:55.819206955 +0000 UTC m=+771.731342017" lastFinishedPulling="2025-12-05 06:04:58.381810296 +0000 UTC m=+774.293945388" observedRunningTime="2025-12-05 06:04:59.131734577 +0000 UTC m=+775.043869739" watchObservedRunningTime="2025-12-05 06:04:59.140562989 +0000 UTC m=+775.052698131" Dec 05 06:04:59 crc kubenswrapper[4742]: I1205 06:04:59.159011 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr" podStartSLOduration=1.5072751420000001 podStartE2EDuration="4.158980303s" podCreationTimestamp="2025-12-05 06:04:55 +0000 UTC" firstStartedPulling="2025-12-05 06:04:55.739228834 +0000 UTC m=+771.651363896" lastFinishedPulling="2025-12-05 06:04:58.390933985 +0000 UTC m=+774.303069057" observedRunningTime="2025-12-05 06:04:59.15354919 +0000 UTC m=+775.065684292" watchObservedRunningTime="2025-12-05 06:04:59.158980303 +0000 UTC m=+775.071115405" Dec 05 06:05:01 crc kubenswrapper[4742]: I1205 06:05:01.107468 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4xvgt" event={"ID":"e78be74b-15df-44da-9b82-909e008442b0","Type":"ContainerStarted","Data":"4c56d87372c1f1bc7d5e49824de8dde89988ffdfa40aa87b13e2a69109706eac"} Dec 05 06:05:01 crc kubenswrapper[4742]: I1205 06:05:01.127886 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-4xvgt" podStartSLOduration=1.353032828 podStartE2EDuration="6.127858296s" podCreationTimestamp="2025-12-05 06:04:55 +0000 UTC" firstStartedPulling="2025-12-05 06:04:55.894533584 +0000 UTC m=+771.806668646" lastFinishedPulling="2025-12-05 06:05:00.669359042 +0000 UTC m=+776.581494114" observedRunningTime="2025-12-05 06:05:01.125439202 +0000 UTC m=+777.037574304" watchObservedRunningTime="2025-12-05 06:05:01.127858296 +0000 UTC m=+777.039993398" Dec 05 06:05:05 crc kubenswrapper[4742]: I1205 06:05:05.593954 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-l8d88" Dec 05 06:05:05 crc kubenswrapper[4742]: I1205 06:05:05.851094 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:05:05 crc kubenswrapper[4742]: I1205 06:05:05.851228 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:05:05 crc kubenswrapper[4742]: I1205 06:05:05.859145 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:05:06 crc kubenswrapper[4742]: I1205 06:05:06.148203 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-bcf7ddbbf-wzwnk" Dec 05 06:05:06 crc kubenswrapper[4742]: I1205 06:05:06.224465 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-778cz"] Dec 05 06:05:15 crc kubenswrapper[4742]: I1205 06:05:15.430906 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-7zxqr" Dec 05 06:05:16 crc kubenswrapper[4742]: I1205 06:05:16.670809 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:05:16 crc kubenswrapper[4742]: I1205 06:05:16.670903 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:05:16 crc kubenswrapper[4742]: I1205 06:05:16.670953 
4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 06:05:16 crc kubenswrapper[4742]: I1205 06:05:16.671558 4742 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8712ed854f5ba4470f6a7971cc87ba22a52ea34afd6f25ae35634401b00bf15a"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:05:16 crc kubenswrapper[4742]: I1205 06:05:16.671647 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://8712ed854f5ba4470f6a7971cc87ba22a52ea34afd6f25ae35634401b00bf15a" gracePeriod=600 Dec 05 06:05:17 crc kubenswrapper[4742]: I1205 06:05:17.228855 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="8712ed854f5ba4470f6a7971cc87ba22a52ea34afd6f25ae35634401b00bf15a" exitCode=0 Dec 05 06:05:17 crc kubenswrapper[4742]: I1205 06:05:17.228920 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"8712ed854f5ba4470f6a7971cc87ba22a52ea34afd6f25ae35634401b00bf15a"} Dec 05 06:05:17 crc kubenswrapper[4742]: I1205 06:05:17.229281 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"6c6428d248edc5bf49d6ecdb41f4e3135ccfb37d799e38e42171af4f0f46c67b"} Dec 05 06:05:17 crc kubenswrapper[4742]: I1205 06:05:17.229315 4742 scope.go:117] "RemoveContainer" containerID="cb5bbdf33d38a4f3a6dca04ce1c51478916fa93eae9566e2afc52c599bf9e737" Dec 05 06:05:29 crc kubenswrapper[4742]: I1205 06:05:29.665638 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h"] Dec 05 06:05:29 crc kubenswrapper[4742]: I1205 06:05:29.667214 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" Dec 05 06:05:29 crc kubenswrapper[4742]: I1205 06:05:29.670190 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 06:05:29 crc kubenswrapper[4742]: I1205 06:05:29.677668 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h"] Dec 05 06:05:29 crc kubenswrapper[4742]: I1205 06:05:29.717541 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/822fa448-d076-49f9-9467-2c912b88b081-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h\" (UID: \"822fa448-d076-49f9-9467-2c912b88b081\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" Dec 05 06:05:29 crc kubenswrapper[4742]: I1205 06:05:29.717888 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/822fa448-d076-49f9-9467-2c912b88b081-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h\" (UID: \"822fa448-d076-49f9-9467-2c912b88b081\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" Dec 05 06:05:29 crc kubenswrapper[4742]: I1205 06:05:29.717960 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqgg6\" (UniqueName: \"kubernetes.io/projected/822fa448-d076-49f9-9467-2c912b88b081-kube-api-access-hqgg6\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h\" (UID: \"822fa448-d076-49f9-9467-2c912b88b081\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" Dec 05 06:05:29 crc kubenswrapper[4742]: I1205 06:05:29.818775 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqgg6\" (UniqueName: \"kubernetes.io/projected/822fa448-d076-49f9-9467-2c912b88b081-kube-api-access-hqgg6\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h\" (UID: \"822fa448-d076-49f9-9467-2c912b88b081\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" Dec 05 06:05:29 crc kubenswrapper[4742]: I1205 06:05:29.818896 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/822fa448-d076-49f9-9467-2c912b88b081-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h\" (UID: \"822fa448-d076-49f9-9467-2c912b88b081\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" Dec 05 06:05:29 crc kubenswrapper[4742]: I1205 06:05:29.818956 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/822fa448-d076-49f9-9467-2c912b88b081-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h\" (UID: \"822fa448-d076-49f9-9467-2c912b88b081\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" Dec 05 06:05:29 crc kubenswrapper[4742]: I1205 06:05:29.819833 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/822fa448-d076-49f9-9467-2c912b88b081-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h\" (UID: \"822fa448-d076-49f9-9467-2c912b88b081\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" Dec 05 06:05:29 crc kubenswrapper[4742]: I1205 06:05:29.819951 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/822fa448-d076-49f9-9467-2c912b88b081-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h\" (UID: \"822fa448-d076-49f9-9467-2c912b88b081\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" Dec 05 06:05:29 crc kubenswrapper[4742]: I1205 06:05:29.853801 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqgg6\" (UniqueName: \"kubernetes.io/projected/822fa448-d076-49f9-9467-2c912b88b081-kube-api-access-hqgg6\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h\" (UID: \"822fa448-d076-49f9-9467-2c912b88b081\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" Dec 05 06:05:29 crc kubenswrapper[4742]: I1205 06:05:29.992296 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" Dec 05 06:05:30 crc kubenswrapper[4742]: I1205 06:05:30.290596 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h"] Dec 05 06:05:30 crc kubenswrapper[4742]: I1205 06:05:30.313772 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" event={"ID":"822fa448-d076-49f9-9467-2c912b88b081","Type":"ContainerStarted","Data":"4ef42aff1a51a3c0ececa15ae571b5326caca292616efbb5ce1fd3d32c509d12"} Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.288833 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-778cz" podUID="91c75381-2f50-415e-b5c8-e1261be30bbc" containerName="console" containerID="cri-o://d17a56b7502b023471f3fb32631c1f94d329b0a14decfdb1f1f2c37bc74d57c9" gracePeriod=15 Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.325497 4742 generic.go:334] "Generic (PLEG): container finished" podID="822fa448-d076-49f9-9467-2c912b88b081" containerID="6333486b5c1d13702e8fa0701d11f4911b13495913a2660eeabcfe23a79e5664" exitCode=0 Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.325900 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" event={"ID":"822fa448-d076-49f9-9467-2c912b88b081","Type":"ContainerDied","Data":"6333486b5c1d13702e8fa0701d11f4911b13495913a2660eeabcfe23a79e5664"} Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.509845 4742 patch_prober.go:28] interesting pod/console-f9d7485db-778cz container/console namespace/openshift-console: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/health\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.509926 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/console-f9d7485db-778cz" podUID="91c75381-2f50-415e-b5c8-e1261be30bbc" containerName="console" 
probeResult="failure" output="Get \"https://10.217.0.19:8443/health\": dial tcp 10.217.0.19:8443: connect: connection refused" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.758537 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-778cz_91c75381-2f50-415e-b5c8-e1261be30bbc/console/0.log" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.758639 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-778cz" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.847433 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-oauth-serving-cert\") pod \"91c75381-2f50-415e-b5c8-e1261be30bbc\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.847544 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-console-config\") pod \"91c75381-2f50-415e-b5c8-e1261be30bbc\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.847628 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-trusted-ca-bundle\") pod \"91c75381-2f50-415e-b5c8-e1261be30bbc\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.847662 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-service-ca\") pod \"91c75381-2f50-415e-b5c8-e1261be30bbc\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.847704 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/91c75381-2f50-415e-b5c8-e1261be30bbc-console-serving-cert\") pod \"91c75381-2f50-415e-b5c8-e1261be30bbc\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.847760 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6smz\" (UniqueName: \"kubernetes.io/projected/91c75381-2f50-415e-b5c8-e1261be30bbc-kube-api-access-z6smz\") pod \"91c75381-2f50-415e-b5c8-e1261be30bbc\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.847881 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/91c75381-2f50-415e-b5c8-e1261be30bbc-console-oauth-config\") pod \"91c75381-2f50-415e-b5c8-e1261be30bbc\" (UID: \"91c75381-2f50-415e-b5c8-e1261be30bbc\") " Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.849566 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "91c75381-2f50-415e-b5c8-e1261be30bbc" (UID: "91c75381-2f50-415e-b5c8-e1261be30bbc"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.849902 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-service-ca" (OuterVolumeSpecName: "service-ca") pod "91c75381-2f50-415e-b5c8-e1261be30bbc" (UID: "91c75381-2f50-415e-b5c8-e1261be30bbc"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.850200 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "91c75381-2f50-415e-b5c8-e1261be30bbc" (UID: "91c75381-2f50-415e-b5c8-e1261be30bbc"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.850421 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-console-config" (OuterVolumeSpecName: "console-config") pod "91c75381-2f50-415e-b5c8-e1261be30bbc" (UID: "91c75381-2f50-415e-b5c8-e1261be30bbc"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.858456 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91c75381-2f50-415e-b5c8-e1261be30bbc-kube-api-access-z6smz" (OuterVolumeSpecName: "kube-api-access-z6smz") pod "91c75381-2f50-415e-b5c8-e1261be30bbc" (UID: "91c75381-2f50-415e-b5c8-e1261be30bbc"). InnerVolumeSpecName "kube-api-access-z6smz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.858644 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91c75381-2f50-415e-b5c8-e1261be30bbc-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "91c75381-2f50-415e-b5c8-e1261be30bbc" (UID: "91c75381-2f50-415e-b5c8-e1261be30bbc"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.858974 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91c75381-2f50-415e-b5c8-e1261be30bbc-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "91c75381-2f50-415e-b5c8-e1261be30bbc" (UID: "91c75381-2f50-415e-b5c8-e1261be30bbc"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.949577 4742 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.949612 4742 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.949621 4742 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.949630 4742 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/91c75381-2f50-415e-b5c8-e1261be30bbc-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.949639 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6smz\" (UniqueName: \"kubernetes.io/projected/91c75381-2f50-415e-b5c8-e1261be30bbc-kube-api-access-z6smz\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.949649 4742 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/91c75381-2f50-415e-b5c8-e1261be30bbc-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:31 crc kubenswrapper[4742]: I1205 06:05:31.949658 4742 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/91c75381-2f50-415e-b5c8-e1261be30bbc-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:32 crc kubenswrapper[4742]: I1205 06:05:32.338903 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-778cz_91c75381-2f50-415e-b5c8-e1261be30bbc/console/0.log" Dec 05 06:05:32 crc kubenswrapper[4742]: I1205 06:05:32.340259 4742 generic.go:334] "Generic (PLEG): container finished" podID="91c75381-2f50-415e-b5c8-e1261be30bbc" containerID="d17a56b7502b023471f3fb32631c1f94d329b0a14decfdb1f1f2c37bc74d57c9" exitCode=2 Dec 05 06:05:32 crc kubenswrapper[4742]: I1205 06:05:32.340422 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-778cz" Dec 05 06:05:32 crc kubenswrapper[4742]: I1205 06:05:32.340387 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-778cz" event={"ID":"91c75381-2f50-415e-b5c8-e1261be30bbc","Type":"ContainerDied","Data":"d17a56b7502b023471f3fb32631c1f94d329b0a14decfdb1f1f2c37bc74d57c9"} Dec 05 06:05:32 crc kubenswrapper[4742]: I1205 06:05:32.340668 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-778cz" event={"ID":"91c75381-2f50-415e-b5c8-e1261be30bbc","Type":"ContainerDied","Data":"362a2e6253948e9db73071b23feeb0fc9f46d56586a15bf87a748fbe762f5b36"} Dec 05 06:05:32 crc kubenswrapper[4742]: I1205 06:05:32.340692 4742 scope.go:117] "RemoveContainer" containerID="d17a56b7502b023471f3fb32631c1f94d329b0a14decfdb1f1f2c37bc74d57c9" Dec 05 06:05:32 crc kubenswrapper[4742]: I1205 06:05:32.370396 4742 scope.go:117] "RemoveContainer" containerID="d17a56b7502b023471f3fb32631c1f94d329b0a14decfdb1f1f2c37bc74d57c9" Dec 05 06:05:32 crc kubenswrapper[4742]: E1205 06:05:32.371244 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d17a56b7502b023471f3fb32631c1f94d329b0a14decfdb1f1f2c37bc74d57c9\": container with ID starting with d17a56b7502b023471f3fb32631c1f94d329b0a14decfdb1f1f2c37bc74d57c9 not found: ID does not exist" containerID="d17a56b7502b023471f3fb32631c1f94d329b0a14decfdb1f1f2c37bc74d57c9" Dec 05 06:05:32 crc kubenswrapper[4742]: I1205 06:05:32.371338 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d17a56b7502b023471f3fb32631c1f94d329b0a14decfdb1f1f2c37bc74d57c9"} err="failed to get container status \"d17a56b7502b023471f3fb32631c1f94d329b0a14decfdb1f1f2c37bc74d57c9\": rpc error: code = NotFound desc = could not find container \"d17a56b7502b023471f3fb32631c1f94d329b0a14decfdb1f1f2c37bc74d57c9\": container with ID starting with d17a56b7502b023471f3fb32631c1f94d329b0a14decfdb1f1f2c37bc74d57c9 not found: ID does not exist" Dec 05 06:05:32 crc kubenswrapper[4742]: I1205 06:05:32.401518 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-778cz"] Dec 05 06:05:32 crc kubenswrapper[4742]: I1205 06:05:32.405875 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-778cz"] Dec 05 06:05:33 crc kubenswrapper[4742]: I1205 06:05:33.350523 4742 generic.go:334] "Generic (PLEG): container finished" podID="822fa448-d076-49f9-9467-2c912b88b081" containerID="9b013f8a9cc039a3e27e792aa70dac0347524fbcbe3f8577d1a261176f4e4033" exitCode=0 Dec 05 06:05:33 crc kubenswrapper[4742]: I1205 06:05:33.350581 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" event={"ID":"822fa448-d076-49f9-9467-2c912b88b081","Type":"ContainerDied","Data":"9b013f8a9cc039a3e27e792aa70dac0347524fbcbe3f8577d1a261176f4e4033"} Dec 05 06:05:34 crc kubenswrapper[4742]: I1205 06:05:34.361400 4742 generic.go:334] "Generic (PLEG): container finished" podID="822fa448-d076-49f9-9467-2c912b88b081" containerID="fca8eac4a4a2fa8be35e7a72ed89e2d83b3c0e7982a1a05aeb93755606f629c6" exitCode=0 Dec 05 06:05:34 crc kubenswrapper[4742]: I1205 06:05:34.361495 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" 
event={"ID":"822fa448-d076-49f9-9467-2c912b88b081","Type":"ContainerDied","Data":"fca8eac4a4a2fa8be35e7a72ed89e2d83b3c0e7982a1a05aeb93755606f629c6"} Dec 05 06:05:34 crc kubenswrapper[4742]: I1205 06:05:34.392632 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91c75381-2f50-415e-b5c8-e1261be30bbc" path="/var/lib/kubelet/pods/91c75381-2f50-415e-b5c8-e1261be30bbc/volumes" Dec 05 06:05:35 crc kubenswrapper[4742]: I1205 06:05:35.683507 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" Dec 05 06:05:35 crc kubenswrapper[4742]: I1205 06:05:35.694451 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/822fa448-d076-49f9-9467-2c912b88b081-util\") pod \"822fa448-d076-49f9-9467-2c912b88b081\" (UID: \"822fa448-d076-49f9-9467-2c912b88b081\") " Dec 05 06:05:35 crc kubenswrapper[4742]: I1205 06:05:35.694552 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqgg6\" (UniqueName: \"kubernetes.io/projected/822fa448-d076-49f9-9467-2c912b88b081-kube-api-access-hqgg6\") pod \"822fa448-d076-49f9-9467-2c912b88b081\" (UID: \"822fa448-d076-49f9-9467-2c912b88b081\") " Dec 05 06:05:35 crc kubenswrapper[4742]: I1205 06:05:35.694582 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/822fa448-d076-49f9-9467-2c912b88b081-bundle\") pod \"822fa448-d076-49f9-9467-2c912b88b081\" (UID: \"822fa448-d076-49f9-9467-2c912b88b081\") " Dec 05 06:05:35 crc kubenswrapper[4742]: I1205 06:05:35.695951 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/822fa448-d076-49f9-9467-2c912b88b081-bundle" (OuterVolumeSpecName: "bundle") pod "822fa448-d076-49f9-9467-2c912b88b081" (UID: "822fa448-d076-49f9-9467-2c912b88b081"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:05:35 crc kubenswrapper[4742]: I1205 06:05:35.705668 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/822fa448-d076-49f9-9467-2c912b88b081-kube-api-access-hqgg6" (OuterVolumeSpecName: "kube-api-access-hqgg6") pod "822fa448-d076-49f9-9467-2c912b88b081" (UID: "822fa448-d076-49f9-9467-2c912b88b081"). InnerVolumeSpecName "kube-api-access-hqgg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:05:35 crc kubenswrapper[4742]: I1205 06:05:35.720411 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/822fa448-d076-49f9-9467-2c912b88b081-util" (OuterVolumeSpecName: "util") pod "822fa448-d076-49f9-9467-2c912b88b081" (UID: "822fa448-d076-49f9-9467-2c912b88b081"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:05:35 crc kubenswrapper[4742]: I1205 06:05:35.796874 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqgg6\" (UniqueName: \"kubernetes.io/projected/822fa448-d076-49f9-9467-2c912b88b081-kube-api-access-hqgg6\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:35 crc kubenswrapper[4742]: I1205 06:05:35.796958 4742 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/822fa448-d076-49f9-9467-2c912b88b081-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:35 crc kubenswrapper[4742]: I1205 06:05:35.796973 4742 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/822fa448-d076-49f9-9467-2c912b88b081-util\") on node \"crc\" DevicePath \"\"" Dec 05 06:05:36 crc kubenswrapper[4742]: I1205 06:05:36.378389 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" event={"ID":"822fa448-d076-49f9-9467-2c912b88b081","Type":"ContainerDied","Data":"4ef42aff1a51a3c0ececa15ae571b5326caca292616efbb5ce1fd3d32c509d12"} Dec 05 06:05:36 crc kubenswrapper[4742]: I1205 06:05:36.378446 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ef42aff1a51a3c0ececa15ae571b5326caca292616efbb5ce1fd3d32c509d12" Dec 05 06:05:36 crc kubenswrapper[4742]: I1205 06:05:36.378591 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.720370 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7"] Dec 05 06:05:44 crc kubenswrapper[4742]: E1205 06:05:44.721338 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="822fa448-d076-49f9-9467-2c912b88b081" containerName="pull" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.721357 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="822fa448-d076-49f9-9467-2c912b88b081" containerName="pull" Dec 05 06:05:44 crc kubenswrapper[4742]: E1205 06:05:44.721380 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="822fa448-d076-49f9-9467-2c912b88b081" containerName="extract" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.721388 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="822fa448-d076-49f9-9467-2c912b88b081" containerName="extract" Dec 05 06:05:44 crc kubenswrapper[4742]: E1205 06:05:44.721404 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="822fa448-d076-49f9-9467-2c912b88b081" containerName="util" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.721411 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="822fa448-d076-49f9-9467-2c912b88b081" containerName="util" Dec 05 06:05:44 crc kubenswrapper[4742]: E1205 06:05:44.721429 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91c75381-2f50-415e-b5c8-e1261be30bbc" containerName="console" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.721436 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="91c75381-2f50-415e-b5c8-e1261be30bbc" containerName="console" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.721583 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="91c75381-2f50-415e-b5c8-e1261be30bbc" containerName="console" Dec 
05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.721597 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="822fa448-d076-49f9-9467-2c912b88b081" containerName="extract" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.722241 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.725694 4742 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.725705 4742 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.726197 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.726234 4742 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-mgjh4" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.730130 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.734994 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7"] Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.823134 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dnbw\" (UniqueName: \"kubernetes.io/projected/1aabe43c-f43a-4355-bff2-0cea43761b1f-kube-api-access-2dnbw\") pod \"metallb-operator-controller-manager-58c6bb7977-9wmg7\" (UID: \"1aabe43c-f43a-4355-bff2-0cea43761b1f\") " pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.823213 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1aabe43c-f43a-4355-bff2-0cea43761b1f-apiservice-cert\") pod \"metallb-operator-controller-manager-58c6bb7977-9wmg7\" (UID: \"1aabe43c-f43a-4355-bff2-0cea43761b1f\") " pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.823239 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1aabe43c-f43a-4355-bff2-0cea43761b1f-webhook-cert\") pod \"metallb-operator-controller-manager-58c6bb7977-9wmg7\" (UID: \"1aabe43c-f43a-4355-bff2-0cea43761b1f\") " pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.924267 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dnbw\" (UniqueName: \"kubernetes.io/projected/1aabe43c-f43a-4355-bff2-0cea43761b1f-kube-api-access-2dnbw\") pod \"metallb-operator-controller-manager-58c6bb7977-9wmg7\" (UID: \"1aabe43c-f43a-4355-bff2-0cea43761b1f\") " pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.924339 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/1aabe43c-f43a-4355-bff2-0cea43761b1f-apiservice-cert\") pod \"metallb-operator-controller-manager-58c6bb7977-9wmg7\" (UID: \"1aabe43c-f43a-4355-bff2-0cea43761b1f\") " pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.924362 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1aabe43c-f43a-4355-bff2-0cea43761b1f-webhook-cert\") pod \"metallb-operator-controller-manager-58c6bb7977-9wmg7\" (UID: \"1aabe43c-f43a-4355-bff2-0cea43761b1f\") " pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.931411 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1aabe43c-f43a-4355-bff2-0cea43761b1f-apiservice-cert\") pod \"metallb-operator-controller-manager-58c6bb7977-9wmg7\" (UID: \"1aabe43c-f43a-4355-bff2-0cea43761b1f\") " pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.931643 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1aabe43c-f43a-4355-bff2-0cea43761b1f-webhook-cert\") pod \"metallb-operator-controller-manager-58c6bb7977-9wmg7\" (UID: \"1aabe43c-f43a-4355-bff2-0cea43761b1f\") " pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" Dec 05 06:05:44 crc kubenswrapper[4742]: I1205 06:05:44.960046 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dnbw\" (UniqueName: \"kubernetes.io/projected/1aabe43c-f43a-4355-bff2-0cea43761b1f-kube-api-access-2dnbw\") pod \"metallb-operator-controller-manager-58c6bb7977-9wmg7\" (UID: \"1aabe43c-f43a-4355-bff2-0cea43761b1f\") " pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.011047 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v"] Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.011715 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.015244 4742 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-7kjz5" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.015242 4742 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.019721 4742 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.045869 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.097964 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v"] Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.126023 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/811775cd-1d30-4b17-aa34-63ce86817f71-webhook-cert\") pod \"metallb-operator-webhook-server-c7469647b-vlc7v\" (UID: \"811775cd-1d30-4b17-aa34-63ce86817f71\") " pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.126088 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpfm6\" (UniqueName: \"kubernetes.io/projected/811775cd-1d30-4b17-aa34-63ce86817f71-kube-api-access-tpfm6\") pod \"metallb-operator-webhook-server-c7469647b-vlc7v\" (UID: \"811775cd-1d30-4b17-aa34-63ce86817f71\") " pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.126108 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/811775cd-1d30-4b17-aa34-63ce86817f71-apiservice-cert\") pod \"metallb-operator-webhook-server-c7469647b-vlc7v\" (UID: \"811775cd-1d30-4b17-aa34-63ce86817f71\") " pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.227380 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/811775cd-1d30-4b17-aa34-63ce86817f71-webhook-cert\") pod \"metallb-operator-webhook-server-c7469647b-vlc7v\" (UID: \"811775cd-1d30-4b17-aa34-63ce86817f71\") " pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.227775 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpfm6\" (UniqueName: \"kubernetes.io/projected/811775cd-1d30-4b17-aa34-63ce86817f71-kube-api-access-tpfm6\") pod \"metallb-operator-webhook-server-c7469647b-vlc7v\" (UID: \"811775cd-1d30-4b17-aa34-63ce86817f71\") " pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.227802 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/811775cd-1d30-4b17-aa34-63ce86817f71-apiservice-cert\") pod \"metallb-operator-webhook-server-c7469647b-vlc7v\" (UID: \"811775cd-1d30-4b17-aa34-63ce86817f71\") " pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.235725 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/811775cd-1d30-4b17-aa34-63ce86817f71-apiservice-cert\") pod \"metallb-operator-webhook-server-c7469647b-vlc7v\" (UID: \"811775cd-1d30-4b17-aa34-63ce86817f71\") " pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.237109 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/811775cd-1d30-4b17-aa34-63ce86817f71-webhook-cert\") pod \"metallb-operator-webhook-server-c7469647b-vlc7v\" (UID: \"811775cd-1d30-4b17-aa34-63ce86817f71\") " pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.244597 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpfm6\" (UniqueName: \"kubernetes.io/projected/811775cd-1d30-4b17-aa34-63ce86817f71-kube-api-access-tpfm6\") pod \"metallb-operator-webhook-server-c7469647b-vlc7v\" (UID: \"811775cd-1d30-4b17-aa34-63ce86817f71\") " pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.325397 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.544702 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v"] Dec 05 06:05:45 crc kubenswrapper[4742]: W1205 06:05:45.556310 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod811775cd_1d30_4b17_aa34_63ce86817f71.slice/crio-f134e8aae9f3b64d67642b9837723873af6e1fd4e373e7149bc461fe9dd03709 WatchSource:0}: Error finding container f134e8aae9f3b64d67642b9837723873af6e1fd4e373e7149bc461fe9dd03709: Status 404 returned error can't find the container with id f134e8aae9f3b64d67642b9837723873af6e1fd4e373e7149bc461fe9dd03709 Dec 05 06:05:45 crc kubenswrapper[4742]: I1205 06:05:45.557538 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7"] Dec 05 06:05:45 crc kubenswrapper[4742]: W1205 06:05:45.563444 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1aabe43c_f43a_4355_bff2_0cea43761b1f.slice/crio-ef0d2d0726aca36533affc0ee0304731bc4c118a99a10d3875b25a23aac91a6a WatchSource:0}: Error finding container ef0d2d0726aca36533affc0ee0304731bc4c118a99a10d3875b25a23aac91a6a: Status 404 returned error can't find the container with id ef0d2d0726aca36533affc0ee0304731bc4c118a99a10d3875b25a23aac91a6a Dec 05 06:05:46 crc kubenswrapper[4742]: I1205 06:05:46.439100 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" event={"ID":"811775cd-1d30-4b17-aa34-63ce86817f71","Type":"ContainerStarted","Data":"f134e8aae9f3b64d67642b9837723873af6e1fd4e373e7149bc461fe9dd03709"} Dec 05 06:05:46 crc kubenswrapper[4742]: I1205 06:05:46.440620 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" event={"ID":"1aabe43c-f43a-4355-bff2-0cea43761b1f","Type":"ContainerStarted","Data":"ef0d2d0726aca36533affc0ee0304731bc4c118a99a10d3875b25a23aac91a6a"} Dec 05 06:05:50 crc kubenswrapper[4742]: I1205 06:05:50.472974 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" event={"ID":"811775cd-1d30-4b17-aa34-63ce86817f71","Type":"ContainerStarted","Data":"b4bc418b1c4942e034dae1aa496f145c96cc0c3a0286b2e866e52d8a65a7a596"} Dec 05 06:05:50 crc kubenswrapper[4742]: I1205 06:05:50.473462 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" Dec 05 06:05:50 crc kubenswrapper[4742]: I1205 06:05:50.475373 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" event={"ID":"1aabe43c-f43a-4355-bff2-0cea43761b1f","Type":"ContainerStarted","Data":"ed46fda1fe6ba9e0919c0fd4b7c7870b7eed1ebbb090809694b10ccfe5035899"} Dec 05 06:05:50 crc kubenswrapper[4742]: I1205 06:05:50.475552 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" Dec 05 06:05:50 crc kubenswrapper[4742]: I1205 06:05:50.493960 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" podStartSLOduration=1.843544117 podStartE2EDuration="6.493913674s" podCreationTimestamp="2025-12-05 06:05:44 +0000 UTC" firstStartedPulling="2025-12-05 06:05:45.558965393 +0000 UTC m=+821.471100455" lastFinishedPulling="2025-12-05 06:05:50.20933494 +0000 UTC m=+826.121470012" observedRunningTime="2025-12-05 06:05:50.492812624 +0000 UTC m=+826.404947716" watchObservedRunningTime="2025-12-05 06:05:50.493913674 +0000 UTC m=+826.406048766" Dec 05 06:05:50 crc kubenswrapper[4742]: I1205 06:05:50.510883 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" podStartSLOduration=1.8768406930000001 podStartE2EDuration="6.510861235s" podCreationTimestamp="2025-12-05 06:05:44 +0000 UTC" firstStartedPulling="2025-12-05 06:05:45.568787484 +0000 UTC m=+821.480922536" lastFinishedPulling="2025-12-05 06:05:50.202808016 +0000 UTC m=+826.114943078" observedRunningTime="2025-12-05 06:05:50.508890462 +0000 UTC m=+826.421025534" watchObservedRunningTime="2025-12-05 06:05:50.510861235 +0000 UTC m=+826.422996287" Dec 05 06:06:05 crc kubenswrapper[4742]: I1205 06:06:05.333860 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-c7469647b-vlc7v" Dec 05 06:06:25 crc kubenswrapper[4742]: I1205 06:06:25.050879 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-58c6bb7977-9wmg7" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.492798 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-2mwsj"] Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.496119 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.500361 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.501894 4742 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.502414 4742 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-6gpv2" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.540812 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl"] Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.541965 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.550603 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl"] Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.571562 4742 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.648709 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/eb609210-85a4-48b1-94fc-bd35d13b9c3d-frr-startup\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.648812 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vtwt\" (UniqueName: \"kubernetes.io/projected/eb609210-85a4-48b1-94fc-bd35d13b9c3d-kube-api-access-4vtwt\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.648865 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/eb609210-85a4-48b1-94fc-bd35d13b9c3d-reloader\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.648892 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1e968846-ee36-497c-b325-b3fb9a719dd5-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-qnzzl\" (UID: \"1e968846-ee36-497c-b325-b3fb9a719dd5\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.648914 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/eb609210-85a4-48b1-94fc-bd35d13b9c3d-metrics\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.648936 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/eb609210-85a4-48b1-94fc-bd35d13b9c3d-frr-sockets\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.648969 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jt45h\" (UniqueName: \"kubernetes.io/projected/1e968846-ee36-497c-b325-b3fb9a719dd5-kube-api-access-jt45h\") pod \"frr-k8s-webhook-server-7fcb986d4-qnzzl\" (UID: \"1e968846-ee36-497c-b325-b3fb9a719dd5\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.649000 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/eb609210-85a4-48b1-94fc-bd35d13b9c3d-frr-conf\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " 
pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.649020 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/eb609210-85a4-48b1-94fc-bd35d13b9c3d-metrics-certs\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.684938 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-hv4z4"] Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.686029 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-hv4z4" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.701482 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.701513 4742 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.701513 4742 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-g8nbg" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.701829 4742 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.705758 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-4rdck"] Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.706565 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-4rdck" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.711813 4742 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.740379 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-4rdck"] Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.749610 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jt45h\" (UniqueName: \"kubernetes.io/projected/1e968846-ee36-497c-b325-b3fb9a719dd5-kube-api-access-jt45h\") pod \"frr-k8s-webhook-server-7fcb986d4-qnzzl\" (UID: \"1e968846-ee36-497c-b325-b3fb9a719dd5\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.749673 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/eb609210-85a4-48b1-94fc-bd35d13b9c3d-frr-conf\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.749692 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/eb609210-85a4-48b1-94fc-bd35d13b9c3d-metrics-certs\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.749725 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/eb609210-85a4-48b1-94fc-bd35d13b9c3d-frr-startup\") pod \"frr-k8s-2mwsj\" (UID: 
\"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.749751 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vtwt\" (UniqueName: \"kubernetes.io/projected/eb609210-85a4-48b1-94fc-bd35d13b9c3d-kube-api-access-4vtwt\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.749782 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/eb609210-85a4-48b1-94fc-bd35d13b9c3d-reloader\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.749799 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1e968846-ee36-497c-b325-b3fb9a719dd5-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-qnzzl\" (UID: \"1e968846-ee36-497c-b325-b3fb9a719dd5\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.749814 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/eb609210-85a4-48b1-94fc-bd35d13b9c3d-metrics\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.749833 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/eb609210-85a4-48b1-94fc-bd35d13b9c3d-frr-sockets\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.750241 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/eb609210-85a4-48b1-94fc-bd35d13b9c3d-frr-sockets\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.750776 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/eb609210-85a4-48b1-94fc-bd35d13b9c3d-frr-conf\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.750811 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/eb609210-85a4-48b1-94fc-bd35d13b9c3d-reloader\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: E1205 06:06:26.750859 4742 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 05 06:06:26 crc kubenswrapper[4742]: E1205 06:06:26.750900 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eb609210-85a4-48b1-94fc-bd35d13b9c3d-metrics-certs podName:eb609210-85a4-48b1-94fc-bd35d13b9c3d nodeName:}" failed. No retries permitted until 2025-12-05 06:06:27.25088516 +0000 UTC m=+863.163020222 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/eb609210-85a4-48b1-94fc-bd35d13b9c3d-metrics-certs") pod "frr-k8s-2mwsj" (UID: "eb609210-85a4-48b1-94fc-bd35d13b9c3d") : secret "frr-k8s-certs-secret" not found Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.751760 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/eb609210-85a4-48b1-94fc-bd35d13b9c3d-frr-startup\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.751947 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/eb609210-85a4-48b1-94fc-bd35d13b9c3d-metrics\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.774711 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1e968846-ee36-497c-b325-b3fb9a719dd5-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-qnzzl\" (UID: \"1e968846-ee36-497c-b325-b3fb9a719dd5\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.776702 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vtwt\" (UniqueName: \"kubernetes.io/projected/eb609210-85a4-48b1-94fc-bd35d13b9c3d-kube-api-access-4vtwt\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.781891 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jt45h\" (UniqueName: \"kubernetes.io/projected/1e968846-ee36-497c-b325-b3fb9a719dd5-kube-api-access-jt45h\") pod \"frr-k8s-webhook-server-7fcb986d4-qnzzl\" (UID: \"1e968846-ee36-497c-b325-b3fb9a719dd5\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.850799 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4f231cac-c8b3-4d09-a4db-b936ea626b09-metrics-certs\") pod \"controller-f8648f98b-4rdck\" (UID: \"4f231cac-c8b3-4d09-a4db-b936ea626b09\") " pod="metallb-system/controller-f8648f98b-4rdck" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.850862 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6e970302-163f-4c96-9be1-740136174111-metallb-excludel2\") pod \"speaker-hv4z4\" (UID: \"6e970302-163f-4c96-9be1-740136174111\") " pod="metallb-system/speaker-hv4z4" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.850940 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6e970302-163f-4c96-9be1-740136174111-metrics-certs\") pod \"speaker-hv4z4\" (UID: \"6e970302-163f-4c96-9be1-740136174111\") " pod="metallb-system/speaker-hv4z4" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.850964 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ms4sv\" (UniqueName: 
\"kubernetes.io/projected/4f231cac-c8b3-4d09-a4db-b936ea626b09-kube-api-access-ms4sv\") pod \"controller-f8648f98b-4rdck\" (UID: \"4f231cac-c8b3-4d09-a4db-b936ea626b09\") " pod="metallb-system/controller-f8648f98b-4rdck" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.850992 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4f231cac-c8b3-4d09-a4db-b936ea626b09-cert\") pod \"controller-f8648f98b-4rdck\" (UID: \"4f231cac-c8b3-4d09-a4db-b936ea626b09\") " pod="metallb-system/controller-f8648f98b-4rdck" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.851015 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lzvf\" (UniqueName: \"kubernetes.io/projected/6e970302-163f-4c96-9be1-740136174111-kube-api-access-4lzvf\") pod \"speaker-hv4z4\" (UID: \"6e970302-163f-4c96-9be1-740136174111\") " pod="metallb-system/speaker-hv4z4" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.851045 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6e970302-163f-4c96-9be1-740136174111-memberlist\") pod \"speaker-hv4z4\" (UID: \"6e970302-163f-4c96-9be1-740136174111\") " pod="metallb-system/speaker-hv4z4" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.923947 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.952753 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6e970302-163f-4c96-9be1-740136174111-memberlist\") pod \"speaker-hv4z4\" (UID: \"6e970302-163f-4c96-9be1-740136174111\") " pod="metallb-system/speaker-hv4z4" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.953077 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4f231cac-c8b3-4d09-a4db-b936ea626b09-metrics-certs\") pod \"controller-f8648f98b-4rdck\" (UID: \"4f231cac-c8b3-4d09-a4db-b936ea626b09\") " pod="metallb-system/controller-f8648f98b-4rdck" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.953104 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6e970302-163f-4c96-9be1-740136174111-metallb-excludel2\") pod \"speaker-hv4z4\" (UID: \"6e970302-163f-4c96-9be1-740136174111\") " pod="metallb-system/speaker-hv4z4" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.953137 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6e970302-163f-4c96-9be1-740136174111-metrics-certs\") pod \"speaker-hv4z4\" (UID: \"6e970302-163f-4c96-9be1-740136174111\") " pod="metallb-system/speaker-hv4z4" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.953156 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ms4sv\" (UniqueName: \"kubernetes.io/projected/4f231cac-c8b3-4d09-a4db-b936ea626b09-kube-api-access-ms4sv\") pod \"controller-f8648f98b-4rdck\" (UID: \"4f231cac-c8b3-4d09-a4db-b936ea626b09\") " pod="metallb-system/controller-f8648f98b-4rdck" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.953174 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4f231cac-c8b3-4d09-a4db-b936ea626b09-cert\") pod \"controller-f8648f98b-4rdck\" (UID: \"4f231cac-c8b3-4d09-a4db-b936ea626b09\") " pod="metallb-system/controller-f8648f98b-4rdck" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.953191 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lzvf\" (UniqueName: \"kubernetes.io/projected/6e970302-163f-4c96-9be1-740136174111-kube-api-access-4lzvf\") pod \"speaker-hv4z4\" (UID: \"6e970302-163f-4c96-9be1-740136174111\") " pod="metallb-system/speaker-hv4z4" Dec 05 06:06:26 crc kubenswrapper[4742]: E1205 06:06:26.953500 4742 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 06:06:26 crc kubenswrapper[4742]: E1205 06:06:26.953543 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6e970302-163f-4c96-9be1-740136174111-memberlist podName:6e970302-163f-4c96-9be1-740136174111 nodeName:}" failed. No retries permitted until 2025-12-05 06:06:27.453528999 +0000 UTC m=+863.365664061 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/6e970302-163f-4c96-9be1-740136174111-memberlist") pod "speaker-hv4z4" (UID: "6e970302-163f-4c96-9be1-740136174111") : secret "metallb-memberlist" not found Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.955270 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6e970302-163f-4c96-9be1-740136174111-metallb-excludel2\") pod \"speaker-hv4z4\" (UID: \"6e970302-163f-4c96-9be1-740136174111\") " pod="metallb-system/speaker-hv4z4" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.958395 4742 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.958571 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6e970302-163f-4c96-9be1-740136174111-metrics-certs\") pod \"speaker-hv4z4\" (UID: \"6e970302-163f-4c96-9be1-740136174111\") " pod="metallb-system/speaker-hv4z4" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.958593 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4f231cac-c8b3-4d09-a4db-b936ea626b09-metrics-certs\") pod \"controller-f8648f98b-4rdck\" (UID: \"4f231cac-c8b3-4d09-a4db-b936ea626b09\") " pod="metallb-system/controller-f8648f98b-4rdck" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.971483 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4f231cac-c8b3-4d09-a4db-b936ea626b09-cert\") pod \"controller-f8648f98b-4rdck\" (UID: \"4f231cac-c8b3-4d09-a4db-b936ea626b09\") " pod="metallb-system/controller-f8648f98b-4rdck" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.973755 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lzvf\" (UniqueName: \"kubernetes.io/projected/6e970302-163f-4c96-9be1-740136174111-kube-api-access-4lzvf\") pod \"speaker-hv4z4\" (UID: \"6e970302-163f-4c96-9be1-740136174111\") " pod="metallb-system/speaker-hv4z4" Dec 05 06:06:26 crc kubenswrapper[4742]: I1205 06:06:26.974900 4742 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-ms4sv\" (UniqueName: \"kubernetes.io/projected/4f231cac-c8b3-4d09-a4db-b936ea626b09-kube-api-access-ms4sv\") pod \"controller-f8648f98b-4rdck\" (UID: \"4f231cac-c8b3-4d09-a4db-b936ea626b09\") " pod="metallb-system/controller-f8648f98b-4rdck" Dec 05 06:06:27 crc kubenswrapper[4742]: I1205 06:06:27.033400 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-4rdck" Dec 05 06:06:27 crc kubenswrapper[4742]: I1205 06:06:27.258072 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/eb609210-85a4-48b1-94fc-bd35d13b9c3d-metrics-certs\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:27 crc kubenswrapper[4742]: I1205 06:06:27.266557 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/eb609210-85a4-48b1-94fc-bd35d13b9c3d-metrics-certs\") pod \"frr-k8s-2mwsj\" (UID: \"eb609210-85a4-48b1-94fc-bd35d13b9c3d\") " pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:27 crc kubenswrapper[4742]: I1205 06:06:27.315960 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl"] Dec 05 06:06:27 crc kubenswrapper[4742]: I1205 06:06:27.425764 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-4rdck"] Dec 05 06:06:27 crc kubenswrapper[4742]: W1205 06:06:27.431043 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f231cac_c8b3_4d09_a4db_b936ea626b09.slice/crio-f19844ff1e81d7416e4fb3f1847f976be91eecf044fae971a5baf7da405fff8e WatchSource:0}: Error finding container f19844ff1e81d7416e4fb3f1847f976be91eecf044fae971a5baf7da405fff8e: Status 404 returned error can't find the container with id f19844ff1e81d7416e4fb3f1847f976be91eecf044fae971a5baf7da405fff8e Dec 05 06:06:27 crc kubenswrapper[4742]: I1205 06:06:27.461393 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6e970302-163f-4c96-9be1-740136174111-memberlist\") pod \"speaker-hv4z4\" (UID: \"6e970302-163f-4c96-9be1-740136174111\") " pod="metallb-system/speaker-hv4z4" Dec 05 06:06:27 crc kubenswrapper[4742]: E1205 06:06:27.461618 4742 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 06:06:27 crc kubenswrapper[4742]: E1205 06:06:27.461741 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6e970302-163f-4c96-9be1-740136174111-memberlist podName:6e970302-163f-4c96-9be1-740136174111 nodeName:}" failed. No retries permitted until 2025-12-05 06:06:28.461713622 +0000 UTC m=+864.373848724 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/6e970302-163f-4c96-9be1-740136174111-memberlist") pod "speaker-hv4z4" (UID: "6e970302-163f-4c96-9be1-740136174111") : secret "metallb-memberlist" not found Dec 05 06:06:27 crc kubenswrapper[4742]: I1205 06:06:27.496209 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:27 crc kubenswrapper[4742]: I1205 06:06:27.743697 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2mwsj" event={"ID":"eb609210-85a4-48b1-94fc-bd35d13b9c3d","Type":"ContainerStarted","Data":"cfb0535281bd1a1f9908cfc6a4c2f2adb311df8b8a733f15c8707d088e05f628"} Dec 05 06:06:27 crc kubenswrapper[4742]: I1205 06:06:27.745418 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-4rdck" event={"ID":"4f231cac-c8b3-4d09-a4db-b936ea626b09","Type":"ContainerStarted","Data":"7e8ec7bcb0a57698d54730b2da2403b4c6ba1509251f3b757a6af652b8e0dc86"} Dec 05 06:06:27 crc kubenswrapper[4742]: I1205 06:06:27.745466 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-4rdck" event={"ID":"4f231cac-c8b3-4d09-a4db-b936ea626b09","Type":"ContainerStarted","Data":"f19844ff1e81d7416e4fb3f1847f976be91eecf044fae971a5baf7da405fff8e"} Dec 05 06:06:27 crc kubenswrapper[4742]: I1205 06:06:27.746753 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl" event={"ID":"1e968846-ee36-497c-b325-b3fb9a719dd5","Type":"ContainerStarted","Data":"55a3aade7d7839b4eeb50889646899f9f9ae4517c450af4d37759ba5f5febdbd"} Dec 05 06:06:28 crc kubenswrapper[4742]: I1205 06:06:28.475131 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6e970302-163f-4c96-9be1-740136174111-memberlist\") pod \"speaker-hv4z4\" (UID: \"6e970302-163f-4c96-9be1-740136174111\") " pod="metallb-system/speaker-hv4z4" Dec 05 06:06:28 crc kubenswrapper[4742]: I1205 06:06:28.490997 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6e970302-163f-4c96-9be1-740136174111-memberlist\") pod \"speaker-hv4z4\" (UID: \"6e970302-163f-4c96-9be1-740136174111\") " pod="metallb-system/speaker-hv4z4" Dec 05 06:06:28 crc kubenswrapper[4742]: I1205 06:06:28.517632 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-hv4z4" Dec 05 06:06:28 crc kubenswrapper[4742]: I1205 06:06:28.769527 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-4rdck" event={"ID":"4f231cac-c8b3-4d09-a4db-b936ea626b09","Type":"ContainerStarted","Data":"8ab3622c7f4234e1edc61bcc28709cbf2e3ec70755e4ffe6b456e6f10cbf632a"} Dec 05 06:06:28 crc kubenswrapper[4742]: I1205 06:06:28.769883 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-4rdck" Dec 05 06:06:28 crc kubenswrapper[4742]: I1205 06:06:28.780392 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-hv4z4" event={"ID":"6e970302-163f-4c96-9be1-740136174111","Type":"ContainerStarted","Data":"925c8fda02a0f229393e65edde0a16ca973716c8f017b67771dbf79ff2150612"} Dec 05 06:06:29 crc kubenswrapper[4742]: I1205 06:06:29.793705 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-hv4z4" event={"ID":"6e970302-163f-4c96-9be1-740136174111","Type":"ContainerStarted","Data":"bfa16db24d6bd8ee21502bcfc6318df6a97e9054dad5df311a0c7b6e7c8cf317"} Dec 05 06:06:29 crc kubenswrapper[4742]: I1205 06:06:29.794097 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-hv4z4" event={"ID":"6e970302-163f-4c96-9be1-740136174111","Type":"ContainerStarted","Data":"81259917cfb41309d9f03683bf5436930da4a1f87200cab96497a6afc36ce520"} Dec 05 06:06:29 crc kubenswrapper[4742]: I1205 06:06:29.794269 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-hv4z4" Dec 05 06:06:29 crc kubenswrapper[4742]: I1205 06:06:29.818168 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-4rdck" podStartSLOduration=3.818132946 podStartE2EDuration="3.818132946s" podCreationTimestamp="2025-12-05 06:06:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:06:28.794788329 +0000 UTC m=+864.706923391" watchObservedRunningTime="2025-12-05 06:06:29.818132946 +0000 UTC m=+865.730268008" Dec 05 06:06:29 crc kubenswrapper[4742]: I1205 06:06:29.818767 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-hv4z4" podStartSLOduration=3.81876157 podStartE2EDuration="3.81876157s" podCreationTimestamp="2025-12-05 06:06:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:06:29.815049285 +0000 UTC m=+865.727184347" watchObservedRunningTime="2025-12-05 06:06:29.81876157 +0000 UTC m=+865.730896632" Dec 05 06:06:34 crc kubenswrapper[4742]: I1205 06:06:34.827966 4742 generic.go:334] "Generic (PLEG): container finished" podID="eb609210-85a4-48b1-94fc-bd35d13b9c3d" containerID="96c13b82330beac2e71e948dcefd0ae0bc0cc2cb903d693e5d5ebdc66a303f5b" exitCode=0 Dec 05 06:06:34 crc kubenswrapper[4742]: I1205 06:06:34.828021 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2mwsj" event={"ID":"eb609210-85a4-48b1-94fc-bd35d13b9c3d","Type":"ContainerDied","Data":"96c13b82330beac2e71e948dcefd0ae0bc0cc2cb903d693e5d5ebdc66a303f5b"} Dec 05 06:06:34 crc kubenswrapper[4742]: I1205 06:06:34.833985 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl" 
event={"ID":"1e968846-ee36-497c-b325-b3fb9a719dd5","Type":"ContainerStarted","Data":"04835a8580496fcc33648f82c8b4b7e27ccd2ab30a70c66787da4d5aefc690f1"} Dec 05 06:06:34 crc kubenswrapper[4742]: I1205 06:06:34.834206 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl" Dec 05 06:06:34 crc kubenswrapper[4742]: I1205 06:06:34.881989 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl" podStartSLOduration=2.085062494 podStartE2EDuration="8.881965805s" podCreationTimestamp="2025-12-05 06:06:26 +0000 UTC" firstStartedPulling="2025-12-05 06:06:27.323581883 +0000 UTC m=+863.235716945" lastFinishedPulling="2025-12-05 06:06:34.120485154 +0000 UTC m=+870.032620256" observedRunningTime="2025-12-05 06:06:34.879185671 +0000 UTC m=+870.791320763" watchObservedRunningTime="2025-12-05 06:06:34.881965805 +0000 UTC m=+870.794100877" Dec 05 06:06:35 crc kubenswrapper[4742]: I1205 06:06:35.843596 4742 generic.go:334] "Generic (PLEG): container finished" podID="eb609210-85a4-48b1-94fc-bd35d13b9c3d" containerID="c0f91cfa2a72eb8a4b3e0fb802762ab1133b98f5c5a154a9b57866fd727aa7e5" exitCode=0 Dec 05 06:06:35 crc kubenswrapper[4742]: I1205 06:06:35.843675 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2mwsj" event={"ID":"eb609210-85a4-48b1-94fc-bd35d13b9c3d","Type":"ContainerDied","Data":"c0f91cfa2a72eb8a4b3e0fb802762ab1133b98f5c5a154a9b57866fd727aa7e5"} Dec 05 06:06:36 crc kubenswrapper[4742]: E1205 06:06:36.150611 4742 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb609210_85a4_48b1_94fc_bd35d13b9c3d.slice/crio-conmon-654354cc2c42bd25878ffb2ad260931dce595b8beddfd023aa7efa06ed2e1173.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb609210_85a4_48b1_94fc_bd35d13b9c3d.slice/crio-654354cc2c42bd25878ffb2ad260931dce595b8beddfd023aa7efa06ed2e1173.scope\": RecentStats: unable to find data in memory cache]" Dec 05 06:06:36 crc kubenswrapper[4742]: I1205 06:06:36.852035 4742 generic.go:334] "Generic (PLEG): container finished" podID="eb609210-85a4-48b1-94fc-bd35d13b9c3d" containerID="654354cc2c42bd25878ffb2ad260931dce595b8beddfd023aa7efa06ed2e1173" exitCode=0 Dec 05 06:06:36 crc kubenswrapper[4742]: I1205 06:06:36.852186 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2mwsj" event={"ID":"eb609210-85a4-48b1-94fc-bd35d13b9c3d","Type":"ContainerDied","Data":"654354cc2c42bd25878ffb2ad260931dce595b8beddfd023aa7efa06ed2e1173"} Dec 05 06:06:37 crc kubenswrapper[4742]: I1205 06:06:37.037360 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-4rdck" Dec 05 06:06:37 crc kubenswrapper[4742]: I1205 06:06:37.860227 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2mwsj" event={"ID":"eb609210-85a4-48b1-94fc-bd35d13b9c3d","Type":"ContainerStarted","Data":"4762d0c35ddb8ca06702a1be74d7b13dc4a42bee0a2109eaa9ccdb635bd10e92"} Dec 05 06:06:37 crc kubenswrapper[4742]: I1205 06:06:37.860565 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2mwsj" event={"ID":"eb609210-85a4-48b1-94fc-bd35d13b9c3d","Type":"ContainerStarted","Data":"41a89dfb543a9f744e6d8706e070ac16e64b876108722bf96ffdfcac51cf5b0c"} Dec 
05 06:06:37 crc kubenswrapper[4742]: I1205 06:06:37.860580 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2mwsj" event={"ID":"eb609210-85a4-48b1-94fc-bd35d13b9c3d","Type":"ContainerStarted","Data":"dcaeaf48ec04cbfb93caf85b6a92cabead63bdb82b734f7b93793cb6c56291d2"} Dec 05 06:06:37 crc kubenswrapper[4742]: I1205 06:06:37.860594 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2mwsj" event={"ID":"eb609210-85a4-48b1-94fc-bd35d13b9c3d","Type":"ContainerStarted","Data":"f48bb9018b4c419a69c6bc30f86ab58ee05f2c35e4e7dc1ccdb4b18d1469b58b"} Dec 05 06:06:37 crc kubenswrapper[4742]: I1205 06:06:37.860606 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2mwsj" event={"ID":"eb609210-85a4-48b1-94fc-bd35d13b9c3d","Type":"ContainerStarted","Data":"d96395dddb200036e82a7ce55f90e8d44716d01a29387768f634cc3d41e7e1bc"} Dec 05 06:06:38 crc kubenswrapper[4742]: I1205 06:06:38.521680 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-hv4z4" Dec 05 06:06:38 crc kubenswrapper[4742]: I1205 06:06:38.874282 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2mwsj" event={"ID":"eb609210-85a4-48b1-94fc-bd35d13b9c3d","Type":"ContainerStarted","Data":"3d23c2f1f12e3b717e2deb3385109a59bad85bf8e62c75df60f167a40068e64b"} Dec 05 06:06:38 crc kubenswrapper[4742]: I1205 06:06:38.874731 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:38 crc kubenswrapper[4742]: I1205 06:06:38.904181 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-2mwsj" podStartSLOduration=6.451610216 podStartE2EDuration="12.904148657s" podCreationTimestamp="2025-12-05 06:06:26 +0000 UTC" firstStartedPulling="2025-12-05 06:06:27.671443934 +0000 UTC m=+863.583579016" lastFinishedPulling="2025-12-05 06:06:34.123982385 +0000 UTC m=+870.036117457" observedRunningTime="2025-12-05 06:06:38.901500696 +0000 UTC m=+874.813635818" watchObservedRunningTime="2025-12-05 06:06:38.904148657 +0000 UTC m=+874.816283769" Dec 05 06:06:39 crc kubenswrapper[4742]: I1205 06:06:39.935089 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs"] Dec 05 06:06:39 crc kubenswrapper[4742]: I1205 06:06:39.937215 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" Dec 05 06:06:39 crc kubenswrapper[4742]: I1205 06:06:39.939205 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 06:06:39 crc kubenswrapper[4742]: I1205 06:06:39.941621 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs"] Dec 05 06:06:40 crc kubenswrapper[4742]: I1205 06:06:40.041193 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2401898b-c4d3-4155-b6af-4ed889d837d5-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs\" (UID: \"2401898b-c4d3-4155-b6af-4ed889d837d5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" Dec 05 06:06:40 crc kubenswrapper[4742]: I1205 06:06:40.041299 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j72xj\" (UniqueName: \"kubernetes.io/projected/2401898b-c4d3-4155-b6af-4ed889d837d5-kube-api-access-j72xj\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs\" (UID: \"2401898b-c4d3-4155-b6af-4ed889d837d5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" Dec 05 06:06:40 crc kubenswrapper[4742]: I1205 06:06:40.041328 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2401898b-c4d3-4155-b6af-4ed889d837d5-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs\" (UID: \"2401898b-c4d3-4155-b6af-4ed889d837d5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" Dec 05 06:06:40 crc kubenswrapper[4742]: I1205 06:06:40.142609 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j72xj\" (UniqueName: \"kubernetes.io/projected/2401898b-c4d3-4155-b6af-4ed889d837d5-kube-api-access-j72xj\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs\" (UID: \"2401898b-c4d3-4155-b6af-4ed889d837d5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" Dec 05 06:06:40 crc kubenswrapper[4742]: I1205 06:06:40.142661 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2401898b-c4d3-4155-b6af-4ed889d837d5-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs\" (UID: \"2401898b-c4d3-4155-b6af-4ed889d837d5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" Dec 05 06:06:40 crc kubenswrapper[4742]: I1205 06:06:40.142760 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2401898b-c4d3-4155-b6af-4ed889d837d5-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs\" (UID: \"2401898b-c4d3-4155-b6af-4ed889d837d5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" Dec 05 06:06:40 crc kubenswrapper[4742]: I1205 06:06:40.143173 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/2401898b-c4d3-4155-b6af-4ed889d837d5-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs\" (UID: \"2401898b-c4d3-4155-b6af-4ed889d837d5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" Dec 05 06:06:40 crc kubenswrapper[4742]: I1205 06:06:40.143205 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2401898b-c4d3-4155-b6af-4ed889d837d5-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs\" (UID: \"2401898b-c4d3-4155-b6af-4ed889d837d5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" Dec 05 06:06:40 crc kubenswrapper[4742]: I1205 06:06:40.160283 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j72xj\" (UniqueName: \"kubernetes.io/projected/2401898b-c4d3-4155-b6af-4ed889d837d5-kube-api-access-j72xj\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs\" (UID: \"2401898b-c4d3-4155-b6af-4ed889d837d5\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" Dec 05 06:06:40 crc kubenswrapper[4742]: I1205 06:06:40.274683 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" Dec 05 06:06:40 crc kubenswrapper[4742]: I1205 06:06:40.497021 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs"] Dec 05 06:06:40 crc kubenswrapper[4742]: W1205 06:06:40.512341 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2401898b_c4d3_4155_b6af_4ed889d837d5.slice/crio-d7634d01dbaa588355a6b2a80e6dde087cc3a55f4c5929f66634acdb408aef10 WatchSource:0}: Error finding container d7634d01dbaa588355a6b2a80e6dde087cc3a55f4c5929f66634acdb408aef10: Status 404 returned error can't find the container with id d7634d01dbaa588355a6b2a80e6dde087cc3a55f4c5929f66634acdb408aef10 Dec 05 06:06:40 crc kubenswrapper[4742]: I1205 06:06:40.897451 4742 generic.go:334] "Generic (PLEG): container finished" podID="2401898b-c4d3-4155-b6af-4ed889d837d5" containerID="4467e8c0e82901fbb67c59e6f3a1389c844a3055aae63af3322a2bb332dc0c03" exitCode=0 Dec 05 06:06:40 crc kubenswrapper[4742]: I1205 06:06:40.897501 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" event={"ID":"2401898b-c4d3-4155-b6af-4ed889d837d5","Type":"ContainerDied","Data":"4467e8c0e82901fbb67c59e6f3a1389c844a3055aae63af3322a2bb332dc0c03"} Dec 05 06:06:40 crc kubenswrapper[4742]: I1205 06:06:40.897546 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" event={"ID":"2401898b-c4d3-4155-b6af-4ed889d837d5","Type":"ContainerStarted","Data":"d7634d01dbaa588355a6b2a80e6dde087cc3a55f4c5929f66634acdb408aef10"} Dec 05 06:06:42 crc kubenswrapper[4742]: I1205 06:06:42.497457 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:42 crc kubenswrapper[4742]: I1205 06:06:42.532276 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:44 crc kubenswrapper[4742]: 
I1205 06:06:44.933346 4742 generic.go:334] "Generic (PLEG): container finished" podID="2401898b-c4d3-4155-b6af-4ed889d837d5" containerID="2a3761380789ecf582c4296a4c6e9fe10647925821a6e691021ec18dcc09472a" exitCode=0 Dec 05 06:06:44 crc kubenswrapper[4742]: I1205 06:06:44.933527 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" event={"ID":"2401898b-c4d3-4155-b6af-4ed889d837d5","Type":"ContainerDied","Data":"2a3761380789ecf582c4296a4c6e9fe10647925821a6e691021ec18dcc09472a"} Dec 05 06:06:45 crc kubenswrapper[4742]: I1205 06:06:45.947522 4742 generic.go:334] "Generic (PLEG): container finished" podID="2401898b-c4d3-4155-b6af-4ed889d837d5" containerID="a5943a996739e47874fc0861960f6efac9cd954a911f3abfa419b3384c63209f" exitCode=0 Dec 05 06:06:45 crc kubenswrapper[4742]: I1205 06:06:45.947723 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" event={"ID":"2401898b-c4d3-4155-b6af-4ed889d837d5","Type":"ContainerDied","Data":"a5943a996739e47874fc0861960f6efac9cd954a911f3abfa419b3384c63209f"} Dec 05 06:06:46 crc kubenswrapper[4742]: I1205 06:06:46.930743 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-qnzzl" Dec 05 06:06:47 crc kubenswrapper[4742]: I1205 06:06:47.246289 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" Dec 05 06:06:47 crc kubenswrapper[4742]: I1205 06:06:47.352537 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j72xj\" (UniqueName: \"kubernetes.io/projected/2401898b-c4d3-4155-b6af-4ed889d837d5-kube-api-access-j72xj\") pod \"2401898b-c4d3-4155-b6af-4ed889d837d5\" (UID: \"2401898b-c4d3-4155-b6af-4ed889d837d5\") " Dec 05 06:06:47 crc kubenswrapper[4742]: I1205 06:06:47.352642 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2401898b-c4d3-4155-b6af-4ed889d837d5-util\") pod \"2401898b-c4d3-4155-b6af-4ed889d837d5\" (UID: \"2401898b-c4d3-4155-b6af-4ed889d837d5\") " Dec 05 06:06:47 crc kubenswrapper[4742]: I1205 06:06:47.352664 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2401898b-c4d3-4155-b6af-4ed889d837d5-bundle\") pod \"2401898b-c4d3-4155-b6af-4ed889d837d5\" (UID: \"2401898b-c4d3-4155-b6af-4ed889d837d5\") " Dec 05 06:06:47 crc kubenswrapper[4742]: I1205 06:06:47.354360 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2401898b-c4d3-4155-b6af-4ed889d837d5-bundle" (OuterVolumeSpecName: "bundle") pod "2401898b-c4d3-4155-b6af-4ed889d837d5" (UID: "2401898b-c4d3-4155-b6af-4ed889d837d5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:06:47 crc kubenswrapper[4742]: I1205 06:06:47.358533 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2401898b-c4d3-4155-b6af-4ed889d837d5-kube-api-access-j72xj" (OuterVolumeSpecName: "kube-api-access-j72xj") pod "2401898b-c4d3-4155-b6af-4ed889d837d5" (UID: "2401898b-c4d3-4155-b6af-4ed889d837d5"). InnerVolumeSpecName "kube-api-access-j72xj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:06:47 crc kubenswrapper[4742]: I1205 06:06:47.366977 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2401898b-c4d3-4155-b6af-4ed889d837d5-util" (OuterVolumeSpecName: "util") pod "2401898b-c4d3-4155-b6af-4ed889d837d5" (UID: "2401898b-c4d3-4155-b6af-4ed889d837d5"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:06:47 crc kubenswrapper[4742]: I1205 06:06:47.454443 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j72xj\" (UniqueName: \"kubernetes.io/projected/2401898b-c4d3-4155-b6af-4ed889d837d5-kube-api-access-j72xj\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:47 crc kubenswrapper[4742]: I1205 06:06:47.454499 4742 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2401898b-c4d3-4155-b6af-4ed889d837d5-util\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:47 crc kubenswrapper[4742]: I1205 06:06:47.454527 4742 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2401898b-c4d3-4155-b6af-4ed889d837d5-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:06:47 crc kubenswrapper[4742]: I1205 06:06:47.503667 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-2mwsj" Dec 05 06:06:47 crc kubenswrapper[4742]: I1205 06:06:47.963468 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" event={"ID":"2401898b-c4d3-4155-b6af-4ed889d837d5","Type":"ContainerDied","Data":"d7634d01dbaa588355a6b2a80e6dde087cc3a55f4c5929f66634acdb408aef10"} Dec 05 06:06:47 crc kubenswrapper[4742]: I1205 06:06:47.963501 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs" Dec 05 06:06:47 crc kubenswrapper[4742]: I1205 06:06:47.963507 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7634d01dbaa588355a6b2a80e6dde087cc3a55f4c5929f66634acdb408aef10" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.537376 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-bl8d8"] Dec 05 06:06:53 crc kubenswrapper[4742]: E1205 06:06:53.538071 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2401898b-c4d3-4155-b6af-4ed889d837d5" containerName="extract" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.538085 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="2401898b-c4d3-4155-b6af-4ed889d837d5" containerName="extract" Dec 05 06:06:53 crc kubenswrapper[4742]: E1205 06:06:53.538098 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2401898b-c4d3-4155-b6af-4ed889d837d5" containerName="pull" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.538104 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="2401898b-c4d3-4155-b6af-4ed889d837d5" containerName="pull" Dec 05 06:06:53 crc kubenswrapper[4742]: E1205 06:06:53.538116 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2401898b-c4d3-4155-b6af-4ed889d837d5" containerName="util" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.538123 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="2401898b-c4d3-4155-b6af-4ed889d837d5" containerName="util" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.538218 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="2401898b-c4d3-4155-b6af-4ed889d837d5" containerName="extract" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.538591 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-bl8d8" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.543661 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.544332 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.549121 4742 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-7xcxh" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.563743 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-bl8d8"] Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.636230 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmxc6\" (UniqueName: \"kubernetes.io/projected/10259f34-ccdb-4666-8ab3-3b9c21a9103d-kube-api-access-bmxc6\") pod \"cert-manager-operator-controller-manager-64cf6dff88-bl8d8\" (UID: \"10259f34-ccdb-4666-8ab3-3b9c21a9103d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-bl8d8" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.636281 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/10259f34-ccdb-4666-8ab3-3b9c21a9103d-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-bl8d8\" (UID: \"10259f34-ccdb-4666-8ab3-3b9c21a9103d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-bl8d8" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.738208 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmxc6\" (UniqueName: \"kubernetes.io/projected/10259f34-ccdb-4666-8ab3-3b9c21a9103d-kube-api-access-bmxc6\") pod \"cert-manager-operator-controller-manager-64cf6dff88-bl8d8\" (UID: \"10259f34-ccdb-4666-8ab3-3b9c21a9103d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-bl8d8" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.738680 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/10259f34-ccdb-4666-8ab3-3b9c21a9103d-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-bl8d8\" (UID: \"10259f34-ccdb-4666-8ab3-3b9c21a9103d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-bl8d8" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.739307 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/10259f34-ccdb-4666-8ab3-3b9c21a9103d-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-bl8d8\" (UID: \"10259f34-ccdb-4666-8ab3-3b9c21a9103d\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-bl8d8" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.772151 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmxc6\" (UniqueName: \"kubernetes.io/projected/10259f34-ccdb-4666-8ab3-3b9c21a9103d-kube-api-access-bmxc6\") pod \"cert-manager-operator-controller-manager-64cf6dff88-bl8d8\" (UID: \"10259f34-ccdb-4666-8ab3-3b9c21a9103d\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-bl8d8" Dec 05 06:06:53 crc kubenswrapper[4742]: I1205 06:06:53.858092 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-bl8d8" Dec 05 06:06:54 crc kubenswrapper[4742]: I1205 06:06:54.176298 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-bl8d8"] Dec 05 06:06:55 crc kubenswrapper[4742]: I1205 06:06:55.011316 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-bl8d8" event={"ID":"10259f34-ccdb-4666-8ab3-3b9c21a9103d","Type":"ContainerStarted","Data":"4eb7171ff682977eb680175d6e597a47460a30a3fc4b8712824295e50c708ef5"} Dec 05 06:07:02 crc kubenswrapper[4742]: I1205 06:07:02.073346 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-bl8d8" event={"ID":"10259f34-ccdb-4666-8ab3-3b9c21a9103d","Type":"ContainerStarted","Data":"cdfd28a2522fc3ea105e8fcb1043964f1f44e5cb13b1d1516287b1cb7796a340"} Dec 05 06:07:06 crc kubenswrapper[4742]: I1205 06:07:06.405580 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-bl8d8" podStartSLOduration=6.198133212 podStartE2EDuration="13.405550672s" podCreationTimestamp="2025-12-05 06:06:53 +0000 UTC" firstStartedPulling="2025-12-05 06:06:54.184597623 +0000 UTC m=+890.096732685" lastFinishedPulling="2025-12-05 06:07:01.392015083 +0000 UTC m=+897.304150145" observedRunningTime="2025-12-05 06:07:02.109869336 +0000 UTC m=+898.022004398" watchObservedRunningTime="2025-12-05 06:07:06.405550672 +0000 UTC m=+902.317685774" Dec 05 06:07:06 crc kubenswrapper[4742]: I1205 06:07:06.417043 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-kvfb8"] Dec 05 06:07:06 crc kubenswrapper[4742]: I1205 06:07:06.418353 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-kvfb8" Dec 05 06:07:06 crc kubenswrapper[4742]: I1205 06:07:06.420279 4742 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-zgjw5" Dec 05 06:07:06 crc kubenswrapper[4742]: I1205 06:07:06.420839 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 05 06:07:06 crc kubenswrapper[4742]: I1205 06:07:06.421336 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 05 06:07:06 crc kubenswrapper[4742]: I1205 06:07:06.429002 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-kvfb8"] Dec 05 06:07:06 crc kubenswrapper[4742]: I1205 06:07:06.543493 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwc6t\" (UniqueName: \"kubernetes.io/projected/00f1d295-623d-4eb6-949b-f51674189d91-kube-api-access-lwc6t\") pod \"cert-manager-cainjector-855d9ccff4-kvfb8\" (UID: \"00f1d295-623d-4eb6-949b-f51674189d91\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-kvfb8" Dec 05 06:07:06 crc kubenswrapper[4742]: I1205 06:07:06.543570 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/00f1d295-623d-4eb6-949b-f51674189d91-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-kvfb8\" (UID: \"00f1d295-623d-4eb6-949b-f51674189d91\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-kvfb8" Dec 05 06:07:06 crc kubenswrapper[4742]: I1205 06:07:06.644540 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwc6t\" (UniqueName: \"kubernetes.io/projected/00f1d295-623d-4eb6-949b-f51674189d91-kube-api-access-lwc6t\") pod \"cert-manager-cainjector-855d9ccff4-kvfb8\" (UID: \"00f1d295-623d-4eb6-949b-f51674189d91\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-kvfb8" Dec 05 06:07:06 crc kubenswrapper[4742]: I1205 06:07:06.644596 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/00f1d295-623d-4eb6-949b-f51674189d91-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-kvfb8\" (UID: \"00f1d295-623d-4eb6-949b-f51674189d91\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-kvfb8" Dec 05 06:07:06 crc kubenswrapper[4742]: I1205 06:07:06.678824 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/00f1d295-623d-4eb6-949b-f51674189d91-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-kvfb8\" (UID: \"00f1d295-623d-4eb6-949b-f51674189d91\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-kvfb8" Dec 05 06:07:06 crc kubenswrapper[4742]: I1205 06:07:06.687663 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwc6t\" (UniqueName: \"kubernetes.io/projected/00f1d295-623d-4eb6-949b-f51674189d91-kube-api-access-lwc6t\") pod \"cert-manager-cainjector-855d9ccff4-kvfb8\" (UID: \"00f1d295-623d-4eb6-949b-f51674189d91\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-kvfb8" Dec 05 06:07:06 crc kubenswrapper[4742]: I1205 06:07:06.739739 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-kvfb8" Dec 05 06:07:07 crc kubenswrapper[4742]: I1205 06:07:07.159137 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-kvfb8"] Dec 05 06:07:07 crc kubenswrapper[4742]: W1205 06:07:07.169823 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00f1d295_623d_4eb6_949b_f51674189d91.slice/crio-d7e660f14db040480c9ebc1d91d55407fa2063c76772642834e5f55b3af11308 WatchSource:0}: Error finding container d7e660f14db040480c9ebc1d91d55407fa2063c76772642834e5f55b3af11308: Status 404 returned error can't find the container with id d7e660f14db040480c9ebc1d91d55407fa2063c76772642834e5f55b3af11308 Dec 05 06:07:07 crc kubenswrapper[4742]: I1205 06:07:07.616388 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-6ggsv"] Dec 05 06:07:07 crc kubenswrapper[4742]: I1205 06:07:07.626087 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-6ggsv" Dec 05 06:07:07 crc kubenswrapper[4742]: I1205 06:07:07.636574 4742 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-vvkjx" Dec 05 06:07:07 crc kubenswrapper[4742]: I1205 06:07:07.650848 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-6ggsv"] Dec 05 06:07:07 crc kubenswrapper[4742]: I1205 06:07:07.659067 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qtd4\" (UniqueName: \"kubernetes.io/projected/74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24-kube-api-access-2qtd4\") pod \"cert-manager-webhook-f4fb5df64-6ggsv\" (UID: \"74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-6ggsv" Dec 05 06:07:07 crc kubenswrapper[4742]: I1205 06:07:07.659130 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-6ggsv\" (UID: \"74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-6ggsv" Dec 05 06:07:07 crc kubenswrapper[4742]: I1205 06:07:07.760346 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qtd4\" (UniqueName: \"kubernetes.io/projected/74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24-kube-api-access-2qtd4\") pod \"cert-manager-webhook-f4fb5df64-6ggsv\" (UID: \"74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-6ggsv" Dec 05 06:07:07 crc kubenswrapper[4742]: I1205 06:07:07.760418 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-6ggsv\" (UID: \"74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-6ggsv" Dec 05 06:07:07 crc kubenswrapper[4742]: I1205 06:07:07.789152 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-6ggsv\" (UID: 
\"74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-6ggsv" Dec 05 06:07:07 crc kubenswrapper[4742]: I1205 06:07:07.795141 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qtd4\" (UniqueName: \"kubernetes.io/projected/74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24-kube-api-access-2qtd4\") pod \"cert-manager-webhook-f4fb5df64-6ggsv\" (UID: \"74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-6ggsv" Dec 05 06:07:08 crc kubenswrapper[4742]: I1205 06:07:08.025545 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-6ggsv" Dec 05 06:07:08 crc kubenswrapper[4742]: I1205 06:07:08.123539 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-kvfb8" event={"ID":"00f1d295-623d-4eb6-949b-f51674189d91","Type":"ContainerStarted","Data":"d7e660f14db040480c9ebc1d91d55407fa2063c76772642834e5f55b3af11308"} Dec 05 06:07:08 crc kubenswrapper[4742]: I1205 06:07:08.438752 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-6ggsv"] Dec 05 06:07:08 crc kubenswrapper[4742]: W1205 06:07:08.447233 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod74f061b9_beb2_4a01_bfd1_d0b1ad1ffe24.slice/crio-85030755b938884b59527dfdc655b7306e2d0307e54aed50eb784f196009dc52 WatchSource:0}: Error finding container 85030755b938884b59527dfdc655b7306e2d0307e54aed50eb784f196009dc52: Status 404 returned error can't find the container with id 85030755b938884b59527dfdc655b7306e2d0307e54aed50eb784f196009dc52 Dec 05 06:07:09 crc kubenswrapper[4742]: I1205 06:07:09.131341 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-6ggsv" event={"ID":"74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24","Type":"ContainerStarted","Data":"85030755b938884b59527dfdc655b7306e2d0307e54aed50eb784f196009dc52"} Dec 05 06:07:11 crc kubenswrapper[4742]: I1205 06:07:11.496558 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c2k4r"] Dec 05 06:07:11 crc kubenswrapper[4742]: I1205 06:07:11.498303 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:11 crc kubenswrapper[4742]: I1205 06:07:11.508407 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c2k4r"] Dec 05 06:07:11 crc kubenswrapper[4742]: I1205 06:07:11.518684 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4fef7b7-3c1a-4732-8710-ac825fca1fea-utilities\") pod \"certified-operators-c2k4r\" (UID: \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\") " pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:11 crc kubenswrapper[4742]: I1205 06:07:11.518752 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thjf9\" (UniqueName: \"kubernetes.io/projected/e4fef7b7-3c1a-4732-8710-ac825fca1fea-kube-api-access-thjf9\") pod \"certified-operators-c2k4r\" (UID: \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\") " pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:11 crc kubenswrapper[4742]: I1205 06:07:11.518810 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4fef7b7-3c1a-4732-8710-ac825fca1fea-catalog-content\") pod \"certified-operators-c2k4r\" (UID: \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\") " pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:11 crc kubenswrapper[4742]: I1205 06:07:11.620312 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4fef7b7-3c1a-4732-8710-ac825fca1fea-utilities\") pod \"certified-operators-c2k4r\" (UID: \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\") " pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:11 crc kubenswrapper[4742]: I1205 06:07:11.620373 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thjf9\" (UniqueName: \"kubernetes.io/projected/e4fef7b7-3c1a-4732-8710-ac825fca1fea-kube-api-access-thjf9\") pod \"certified-operators-c2k4r\" (UID: \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\") " pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:11 crc kubenswrapper[4742]: I1205 06:07:11.620405 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4fef7b7-3c1a-4732-8710-ac825fca1fea-catalog-content\") pod \"certified-operators-c2k4r\" (UID: \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\") " pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:11 crc kubenswrapper[4742]: I1205 06:07:11.620920 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4fef7b7-3c1a-4732-8710-ac825fca1fea-utilities\") pod \"certified-operators-c2k4r\" (UID: \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\") " pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:11 crc kubenswrapper[4742]: I1205 06:07:11.620946 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4fef7b7-3c1a-4732-8710-ac825fca1fea-catalog-content\") pod \"certified-operators-c2k4r\" (UID: \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\") " pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:11 crc kubenswrapper[4742]: I1205 06:07:11.648080 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-thjf9\" (UniqueName: \"kubernetes.io/projected/e4fef7b7-3c1a-4732-8710-ac825fca1fea-kube-api-access-thjf9\") pod \"certified-operators-c2k4r\" (UID: \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\") " pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:11 crc kubenswrapper[4742]: I1205 06:07:11.821289 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:15 crc kubenswrapper[4742]: I1205 06:07:15.593328 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c2k4r"] Dec 05 06:07:15 crc kubenswrapper[4742]: W1205 06:07:15.602540 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4fef7b7_3c1a_4732_8710_ac825fca1fea.slice/crio-76a2ceb66589659adae3f11c55849630f5c86cdfaf912ececdbe37b88f48d114 WatchSource:0}: Error finding container 76a2ceb66589659adae3f11c55849630f5c86cdfaf912ececdbe37b88f48d114: Status 404 returned error can't find the container with id 76a2ceb66589659adae3f11c55849630f5c86cdfaf912ececdbe37b88f48d114 Dec 05 06:07:16 crc kubenswrapper[4742]: I1205 06:07:16.183274 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-kvfb8" event={"ID":"00f1d295-623d-4eb6-949b-f51674189d91","Type":"ContainerStarted","Data":"44bd2cd35d16308050e3e8d09a2db891b79aa817749422e99eaf780990f4a3a1"} Dec 05 06:07:16 crc kubenswrapper[4742]: I1205 06:07:16.185244 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-6ggsv" event={"ID":"74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24","Type":"ContainerStarted","Data":"3ae34f446b0d1cacbbdd81254793fc5af39f63e1b50fb5f824a2b3dd217632e4"} Dec 05 06:07:16 crc kubenswrapper[4742]: I1205 06:07:16.185346 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-6ggsv" Dec 05 06:07:16 crc kubenswrapper[4742]: I1205 06:07:16.187400 4742 generic.go:334] "Generic (PLEG): container finished" podID="e4fef7b7-3c1a-4732-8710-ac825fca1fea" containerID="113080644dc5c27081f011c664020d9bcb0351dc278860fa7ec579c058ff374a" exitCode=0 Dec 05 06:07:16 crc kubenswrapper[4742]: I1205 06:07:16.187504 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c2k4r" event={"ID":"e4fef7b7-3c1a-4732-8710-ac825fca1fea","Type":"ContainerDied","Data":"113080644dc5c27081f011c664020d9bcb0351dc278860fa7ec579c058ff374a"} Dec 05 06:07:16 crc kubenswrapper[4742]: I1205 06:07:16.187574 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c2k4r" event={"ID":"e4fef7b7-3c1a-4732-8710-ac825fca1fea","Type":"ContainerStarted","Data":"76a2ceb66589659adae3f11c55849630f5c86cdfaf912ececdbe37b88f48d114"} Dec 05 06:07:16 crc kubenswrapper[4742]: I1205 06:07:16.209517 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-kvfb8" podStartSLOduration=1.8748510280000001 podStartE2EDuration="10.209494963s" podCreationTimestamp="2025-12-05 06:07:06 +0000 UTC" firstStartedPulling="2025-12-05 06:07:07.174502385 +0000 UTC m=+903.086637457" lastFinishedPulling="2025-12-05 06:07:15.50914633 +0000 UTC m=+911.421281392" observedRunningTime="2025-12-05 06:07:16.207045648 +0000 UTC m=+912.119180730" watchObservedRunningTime="2025-12-05 
06:07:16.209494963 +0000 UTC m=+912.121630045" Dec 05 06:07:16 crc kubenswrapper[4742]: I1205 06:07:16.231593 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-6ggsv" podStartSLOduration=2.194352031 podStartE2EDuration="9.231578895s" podCreationTimestamp="2025-12-05 06:07:07 +0000 UTC" firstStartedPulling="2025-12-05 06:07:08.448735824 +0000 UTC m=+904.360870886" lastFinishedPulling="2025-12-05 06:07:15.485962688 +0000 UTC m=+911.398097750" observedRunningTime="2025-12-05 06:07:16.226915952 +0000 UTC m=+912.139051024" watchObservedRunningTime="2025-12-05 06:07:16.231578895 +0000 UTC m=+912.143713957" Dec 05 06:07:16 crc kubenswrapper[4742]: I1205 06:07:16.670621 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:07:16 crc kubenswrapper[4742]: I1205 06:07:16.670941 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:07:17 crc kubenswrapper[4742]: I1205 06:07:17.196625 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c2k4r" event={"ID":"e4fef7b7-3c1a-4732-8710-ac825fca1fea","Type":"ContainerStarted","Data":"e4da84089c7b4290623b1664b94657d09fc904251641f6c9d90301c31eb2208a"} Dec 05 06:07:18 crc kubenswrapper[4742]: I1205 06:07:18.204486 4742 generic.go:334] "Generic (PLEG): container finished" podID="e4fef7b7-3c1a-4732-8710-ac825fca1fea" containerID="e4da84089c7b4290623b1664b94657d09fc904251641f6c9d90301c31eb2208a" exitCode=0 Dec 05 06:07:18 crc kubenswrapper[4742]: I1205 06:07:18.204522 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c2k4r" event={"ID":"e4fef7b7-3c1a-4732-8710-ac825fca1fea","Type":"ContainerDied","Data":"e4da84089c7b4290623b1664b94657d09fc904251641f6c9d90301c31eb2208a"} Dec 05 06:07:18 crc kubenswrapper[4742]: I1205 06:07:18.204571 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c2k4r" event={"ID":"e4fef7b7-3c1a-4732-8710-ac825fca1fea","Type":"ContainerStarted","Data":"2ee8c4f0acc7b780f12096055b1a02fa1a069b63115f36c4ef9190e1909d47b0"} Dec 05 06:07:18 crc kubenswrapper[4742]: I1205 06:07:18.225761 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c2k4r" podStartSLOduration=5.812381386 podStartE2EDuration="7.225738754s" podCreationTimestamp="2025-12-05 06:07:11 +0000 UTC" firstStartedPulling="2025-12-05 06:07:16.189880256 +0000 UTC m=+912.102015328" lastFinishedPulling="2025-12-05 06:07:17.603237604 +0000 UTC m=+913.515372696" observedRunningTime="2025-12-05 06:07:18.224182533 +0000 UTC m=+914.136317805" watchObservedRunningTime="2025-12-05 06:07:18.225738754 +0000 UTC m=+914.137873826" Dec 05 06:07:21 crc kubenswrapper[4742]: I1205 06:07:21.822626 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:21 crc kubenswrapper[4742]: I1205 06:07:21.823283 4742 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:21 crc kubenswrapper[4742]: I1205 06:07:21.873902 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:22 crc kubenswrapper[4742]: I1205 06:07:22.308044 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:22 crc kubenswrapper[4742]: I1205 06:07:22.368778 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c2k4r"] Dec 05 06:07:23 crc kubenswrapper[4742]: I1205 06:07:23.030146 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-6ggsv" Dec 05 06:07:23 crc kubenswrapper[4742]: I1205 06:07:23.384503 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-rd89x"] Dec 05 06:07:23 crc kubenswrapper[4742]: I1205 06:07:23.385215 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-rd89x" Dec 05 06:07:23 crc kubenswrapper[4742]: I1205 06:07:23.387838 4742 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-6t2tr" Dec 05 06:07:23 crc kubenswrapper[4742]: I1205 06:07:23.410299 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-rd89x"] Dec 05 06:07:23 crc kubenswrapper[4742]: I1205 06:07:23.491333 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fk4t7\" (UniqueName: \"kubernetes.io/projected/9e4497f9-5c79-4ab4-b6fd-501ba13e3ede-kube-api-access-fk4t7\") pod \"cert-manager-86cb77c54b-rd89x\" (UID: \"9e4497f9-5c79-4ab4-b6fd-501ba13e3ede\") " pod="cert-manager/cert-manager-86cb77c54b-rd89x" Dec 05 06:07:23 crc kubenswrapper[4742]: I1205 06:07:23.491449 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9e4497f9-5c79-4ab4-b6fd-501ba13e3ede-bound-sa-token\") pod \"cert-manager-86cb77c54b-rd89x\" (UID: \"9e4497f9-5c79-4ab4-b6fd-501ba13e3ede\") " pod="cert-manager/cert-manager-86cb77c54b-rd89x" Dec 05 06:07:23 crc kubenswrapper[4742]: I1205 06:07:23.592093 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fk4t7\" (UniqueName: \"kubernetes.io/projected/9e4497f9-5c79-4ab4-b6fd-501ba13e3ede-kube-api-access-fk4t7\") pod \"cert-manager-86cb77c54b-rd89x\" (UID: \"9e4497f9-5c79-4ab4-b6fd-501ba13e3ede\") " pod="cert-manager/cert-manager-86cb77c54b-rd89x" Dec 05 06:07:23 crc kubenswrapper[4742]: I1205 06:07:23.592193 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9e4497f9-5c79-4ab4-b6fd-501ba13e3ede-bound-sa-token\") pod \"cert-manager-86cb77c54b-rd89x\" (UID: \"9e4497f9-5c79-4ab4-b6fd-501ba13e3ede\") " pod="cert-manager/cert-manager-86cb77c54b-rd89x" Dec 05 06:07:23 crc kubenswrapper[4742]: I1205 06:07:23.617948 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9e4497f9-5c79-4ab4-b6fd-501ba13e3ede-bound-sa-token\") pod \"cert-manager-86cb77c54b-rd89x\" (UID: \"9e4497f9-5c79-4ab4-b6fd-501ba13e3ede\") " 
pod="cert-manager/cert-manager-86cb77c54b-rd89x" Dec 05 06:07:23 crc kubenswrapper[4742]: I1205 06:07:23.617985 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fk4t7\" (UniqueName: \"kubernetes.io/projected/9e4497f9-5c79-4ab4-b6fd-501ba13e3ede-kube-api-access-fk4t7\") pod \"cert-manager-86cb77c54b-rd89x\" (UID: \"9e4497f9-5c79-4ab4-b6fd-501ba13e3ede\") " pod="cert-manager/cert-manager-86cb77c54b-rd89x" Dec 05 06:07:23 crc kubenswrapper[4742]: I1205 06:07:23.702812 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-rd89x" Dec 05 06:07:24 crc kubenswrapper[4742]: I1205 06:07:24.207498 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-rd89x"] Dec 05 06:07:24 crc kubenswrapper[4742]: I1205 06:07:24.245450 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-rd89x" event={"ID":"9e4497f9-5c79-4ab4-b6fd-501ba13e3ede","Type":"ContainerStarted","Data":"b500dc79f7e4f9bae8939a65d9d85c103060fb26de7da830c35c9fe1bf7cb97b"} Dec 05 06:07:24 crc kubenswrapper[4742]: I1205 06:07:24.245643 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c2k4r" podUID="e4fef7b7-3c1a-4732-8710-ac825fca1fea" containerName="registry-server" containerID="cri-o://2ee8c4f0acc7b780f12096055b1a02fa1a069b63115f36c4ef9190e1909d47b0" gracePeriod=2 Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.255514 4742 generic.go:334] "Generic (PLEG): container finished" podID="e4fef7b7-3c1a-4732-8710-ac825fca1fea" containerID="2ee8c4f0acc7b780f12096055b1a02fa1a069b63115f36c4ef9190e1909d47b0" exitCode=0 Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.255563 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c2k4r" event={"ID":"e4fef7b7-3c1a-4732-8710-ac825fca1fea","Type":"ContainerDied","Data":"2ee8c4f0acc7b780f12096055b1a02fa1a069b63115f36c4ef9190e1909d47b0"} Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.255866 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c2k4r" event={"ID":"e4fef7b7-3c1a-4732-8710-ac825fca1fea","Type":"ContainerDied","Data":"76a2ceb66589659adae3f11c55849630f5c86cdfaf912ececdbe37b88f48d114"} Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.255885 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76a2ceb66589659adae3f11c55849630f5c86cdfaf912ececdbe37b88f48d114" Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.258392 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-rd89x" event={"ID":"9e4497f9-5c79-4ab4-b6fd-501ba13e3ede","Type":"ContainerStarted","Data":"a132abfd03837a3c2a10a5df551435a760a5f7288af2812b4868ecc63cdfed96"} Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.279368 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-rd89x" podStartSLOduration=2.279351437 podStartE2EDuration="2.279351437s" podCreationTimestamp="2025-12-05 06:07:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:07:25.274719465 +0000 UTC m=+921.186854567" watchObservedRunningTime="2025-12-05 06:07:25.279351437 +0000 UTC m=+921.191486499" Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 
06:07:25.281434 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.415614 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4fef7b7-3c1a-4732-8710-ac825fca1fea-utilities\") pod \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\" (UID: \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\") " Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.415680 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4fef7b7-3c1a-4732-8710-ac825fca1fea-catalog-content\") pod \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\" (UID: \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\") " Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.415819 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thjf9\" (UniqueName: \"kubernetes.io/projected/e4fef7b7-3c1a-4732-8710-ac825fca1fea-kube-api-access-thjf9\") pod \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\" (UID: \"e4fef7b7-3c1a-4732-8710-ac825fca1fea\") " Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.416389 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4fef7b7-3c1a-4732-8710-ac825fca1fea-utilities" (OuterVolumeSpecName: "utilities") pod "e4fef7b7-3c1a-4732-8710-ac825fca1fea" (UID: "e4fef7b7-3c1a-4732-8710-ac825fca1fea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.424577 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4fef7b7-3c1a-4732-8710-ac825fca1fea-kube-api-access-thjf9" (OuterVolumeSpecName: "kube-api-access-thjf9") pod "e4fef7b7-3c1a-4732-8710-ac825fca1fea" (UID: "e4fef7b7-3c1a-4732-8710-ac825fca1fea"). InnerVolumeSpecName "kube-api-access-thjf9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.475417 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4fef7b7-3c1a-4732-8710-ac825fca1fea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e4fef7b7-3c1a-4732-8710-ac825fca1fea" (UID: "e4fef7b7-3c1a-4732-8710-ac825fca1fea"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.516890 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e4fef7b7-3c1a-4732-8710-ac825fca1fea-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.516919 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e4fef7b7-3c1a-4732-8710-ac825fca1fea-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:07:25 crc kubenswrapper[4742]: I1205 06:07:25.516930 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thjf9\" (UniqueName: \"kubernetes.io/projected/e4fef7b7-3c1a-4732-8710-ac825fca1fea-kube-api-access-thjf9\") on node \"crc\" DevicePath \"\"" Dec 05 06:07:26 crc kubenswrapper[4742]: I1205 06:07:26.264546 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c2k4r" Dec 05 06:07:26 crc kubenswrapper[4742]: I1205 06:07:26.306165 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c2k4r"] Dec 05 06:07:26 crc kubenswrapper[4742]: I1205 06:07:26.321253 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c2k4r"] Dec 05 06:07:26 crc kubenswrapper[4742]: I1205 06:07:26.393715 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4fef7b7-3c1a-4732-8710-ac825fca1fea" path="/var/lib/kubelet/pods/e4fef7b7-3c1a-4732-8710-ac825fca1fea/volumes" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.532409 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2kq4k"] Dec 05 06:07:27 crc kubenswrapper[4742]: E1205 06:07:27.532972 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4fef7b7-3c1a-4732-8710-ac825fca1fea" containerName="extract-content" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.532985 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4fef7b7-3c1a-4732-8710-ac825fca1fea" containerName="extract-content" Dec 05 06:07:27 crc kubenswrapper[4742]: E1205 06:07:27.532995 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4fef7b7-3c1a-4732-8710-ac825fca1fea" containerName="registry-server" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.533001 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4fef7b7-3c1a-4732-8710-ac825fca1fea" containerName="registry-server" Dec 05 06:07:27 crc kubenswrapper[4742]: E1205 06:07:27.533010 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4fef7b7-3c1a-4732-8710-ac825fca1fea" containerName="extract-utilities" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.533016 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4fef7b7-3c1a-4732-8710-ac825fca1fea" containerName="extract-utilities" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.533134 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4fef7b7-3c1a-4732-8710-ac825fca1fea" containerName="registry-server" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.533881 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.614681 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2kq4k"] Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.648017 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j78sj\" (UniqueName: \"kubernetes.io/projected/edd92f41-0e9c-47a1-86ed-eb3fefd43936-kube-api-access-j78sj\") pod \"community-operators-2kq4k\" (UID: \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\") " pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.648066 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edd92f41-0e9c-47a1-86ed-eb3fefd43936-utilities\") pod \"community-operators-2kq4k\" (UID: \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\") " pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.648097 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edd92f41-0e9c-47a1-86ed-eb3fefd43936-catalog-content\") pod \"community-operators-2kq4k\" (UID: \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\") " pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.748950 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edd92f41-0e9c-47a1-86ed-eb3fefd43936-utilities\") pod \"community-operators-2kq4k\" (UID: \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\") " pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.749331 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j78sj\" (UniqueName: \"kubernetes.io/projected/edd92f41-0e9c-47a1-86ed-eb3fefd43936-kube-api-access-j78sj\") pod \"community-operators-2kq4k\" (UID: \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\") " pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.749492 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edd92f41-0e9c-47a1-86ed-eb3fefd43936-catalog-content\") pod \"community-operators-2kq4k\" (UID: \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\") " pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.749583 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edd92f41-0e9c-47a1-86ed-eb3fefd43936-utilities\") pod \"community-operators-2kq4k\" (UID: \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\") " pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.750020 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edd92f41-0e9c-47a1-86ed-eb3fefd43936-catalog-content\") pod \"community-operators-2kq4k\" (UID: \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\") " pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.783949 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-j78sj\" (UniqueName: \"kubernetes.io/projected/edd92f41-0e9c-47a1-86ed-eb3fefd43936-kube-api-access-j78sj\") pod \"community-operators-2kq4k\" (UID: \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\") " pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:27 crc kubenswrapper[4742]: I1205 06:07:27.862137 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:28 crc kubenswrapper[4742]: I1205 06:07:28.349404 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2kq4k"] Dec 05 06:07:28 crc kubenswrapper[4742]: W1205 06:07:28.353220 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podedd92f41_0e9c_47a1_86ed_eb3fefd43936.slice/crio-c36bef3ed62d8c4866c967f652192743708af62c4d333d9ea93ea017813f4a0c WatchSource:0}: Error finding container c36bef3ed62d8c4866c967f652192743708af62c4d333d9ea93ea017813f4a0c: Status 404 returned error can't find the container with id c36bef3ed62d8c4866c967f652192743708af62c4d333d9ea93ea017813f4a0c Dec 05 06:07:29 crc kubenswrapper[4742]: I1205 06:07:29.288376 4742 generic.go:334] "Generic (PLEG): container finished" podID="edd92f41-0e9c-47a1-86ed-eb3fefd43936" containerID="80ef567138d161facb0756c109f62bd19cf413c90ae3cb14c8c77f60a8ead3f8" exitCode=0 Dec 05 06:07:29 crc kubenswrapper[4742]: I1205 06:07:29.288466 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2kq4k" event={"ID":"edd92f41-0e9c-47a1-86ed-eb3fefd43936","Type":"ContainerDied","Data":"80ef567138d161facb0756c109f62bd19cf413c90ae3cb14c8c77f60a8ead3f8"} Dec 05 06:07:29 crc kubenswrapper[4742]: I1205 06:07:29.288686 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2kq4k" event={"ID":"edd92f41-0e9c-47a1-86ed-eb3fefd43936","Type":"ContainerStarted","Data":"c36bef3ed62d8c4866c967f652192743708af62c4d333d9ea93ea017813f4a0c"} Dec 05 06:07:31 crc kubenswrapper[4742]: I1205 06:07:31.305168 4742 generic.go:334] "Generic (PLEG): container finished" podID="edd92f41-0e9c-47a1-86ed-eb3fefd43936" containerID="4adaec6ad1e250ccf5f13cff39a9cbeee3e8dc18d32d73646846cacff7503cc2" exitCode=0 Dec 05 06:07:31 crc kubenswrapper[4742]: I1205 06:07:31.305266 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2kq4k" event={"ID":"edd92f41-0e9c-47a1-86ed-eb3fefd43936","Type":"ContainerDied","Data":"4adaec6ad1e250ccf5f13cff39a9cbeee3e8dc18d32d73646846cacff7503cc2"} Dec 05 06:07:32 crc kubenswrapper[4742]: I1205 06:07:32.313877 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2kq4k" event={"ID":"edd92f41-0e9c-47a1-86ed-eb3fefd43936","Type":"ContainerStarted","Data":"6e684ae580f405ace68c908f55ce200337272d3fdf8e7406e288c2c3c6f9bdf5"} Dec 05 06:07:32 crc kubenswrapper[4742]: I1205 06:07:32.332344 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2kq4k" podStartSLOduration=2.8398645780000003 podStartE2EDuration="5.332330303s" podCreationTimestamp="2025-12-05 06:07:27 +0000 UTC" firstStartedPulling="2025-12-05 06:07:29.290985619 +0000 UTC m=+925.203120691" lastFinishedPulling="2025-12-05 06:07:31.783451314 +0000 UTC m=+927.695586416" observedRunningTime="2025-12-05 06:07:32.330091374 +0000 UTC 
m=+928.242226436" watchObservedRunningTime="2025-12-05 06:07:32.332330303 +0000 UTC m=+928.244465365" Dec 05 06:07:36 crc kubenswrapper[4742]: I1205 06:07:36.221743 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-jk4jz"] Dec 05 06:07:36 crc kubenswrapper[4742]: I1205 06:07:36.222834 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jk4jz" Dec 05 06:07:36 crc kubenswrapper[4742]: I1205 06:07:36.225173 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 05 06:07:36 crc kubenswrapper[4742]: I1205 06:07:36.225281 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-rd85s" Dec 05 06:07:36 crc kubenswrapper[4742]: I1205 06:07:36.225961 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 05 06:07:36 crc kubenswrapper[4742]: I1205 06:07:36.235825 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jk4jz"] Dec 05 06:07:36 crc kubenswrapper[4742]: I1205 06:07:36.395022 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b5tk\" (UniqueName: \"kubernetes.io/projected/fa3082f0-b278-4bd6-b457-8a06652d93f4-kube-api-access-7b5tk\") pod \"openstack-operator-index-jk4jz\" (UID: \"fa3082f0-b278-4bd6-b457-8a06652d93f4\") " pod="openstack-operators/openstack-operator-index-jk4jz" Dec 05 06:07:36 crc kubenswrapper[4742]: I1205 06:07:36.495884 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b5tk\" (UniqueName: \"kubernetes.io/projected/fa3082f0-b278-4bd6-b457-8a06652d93f4-kube-api-access-7b5tk\") pod \"openstack-operator-index-jk4jz\" (UID: \"fa3082f0-b278-4bd6-b457-8a06652d93f4\") " pod="openstack-operators/openstack-operator-index-jk4jz" Dec 05 06:07:36 crc kubenswrapper[4742]: I1205 06:07:36.520106 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b5tk\" (UniqueName: \"kubernetes.io/projected/fa3082f0-b278-4bd6-b457-8a06652d93f4-kube-api-access-7b5tk\") pod \"openstack-operator-index-jk4jz\" (UID: \"fa3082f0-b278-4bd6-b457-8a06652d93f4\") " pod="openstack-operators/openstack-operator-index-jk4jz" Dec 05 06:07:36 crc kubenswrapper[4742]: I1205 06:07:36.545257 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jk4jz" Dec 05 06:07:36 crc kubenswrapper[4742]: I1205 06:07:36.740410 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jk4jz"] Dec 05 06:07:36 crc kubenswrapper[4742]: W1205 06:07:36.745290 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa3082f0_b278_4bd6_b457_8a06652d93f4.slice/crio-97748c066f11ccdc82463df270e791e55c86e07836d5e56b5b737022e1840618 WatchSource:0}: Error finding container 97748c066f11ccdc82463df270e791e55c86e07836d5e56b5b737022e1840618: Status 404 returned error can't find the container with id 97748c066f11ccdc82463df270e791e55c86e07836d5e56b5b737022e1840618 Dec 05 06:07:37 crc kubenswrapper[4742]: I1205 06:07:37.371741 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jk4jz" event={"ID":"fa3082f0-b278-4bd6-b457-8a06652d93f4","Type":"ContainerStarted","Data":"97748c066f11ccdc82463df270e791e55c86e07836d5e56b5b737022e1840618"} Dec 05 06:07:37 crc kubenswrapper[4742]: I1205 06:07:37.863246 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:37 crc kubenswrapper[4742]: I1205 06:07:37.863295 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:37 crc kubenswrapper[4742]: I1205 06:07:37.912697 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:38 crc kubenswrapper[4742]: I1205 06:07:38.424081 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:40 crc kubenswrapper[4742]: I1205 06:07:40.011948 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jk4jz"] Dec 05 06:07:40 crc kubenswrapper[4742]: I1205 06:07:40.821300 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-wf5gb"] Dec 05 06:07:40 crc kubenswrapper[4742]: I1205 06:07:40.823249 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-wf5gb" Dec 05 06:07:40 crc kubenswrapper[4742]: I1205 06:07:40.832046 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-wf5gb"] Dec 05 06:07:40 crc kubenswrapper[4742]: I1205 06:07:40.993625 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4k4xj\" (UniqueName: \"kubernetes.io/projected/9f4bfd9b-9056-4096-804c-5cdf8a6a29d4-kube-api-access-4k4xj\") pod \"openstack-operator-index-wf5gb\" (UID: \"9f4bfd9b-9056-4096-804c-5cdf8a6a29d4\") " pod="openstack-operators/openstack-operator-index-wf5gb" Dec 05 06:07:41 crc kubenswrapper[4742]: I1205 06:07:41.094974 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4k4xj\" (UniqueName: \"kubernetes.io/projected/9f4bfd9b-9056-4096-804c-5cdf8a6a29d4-kube-api-access-4k4xj\") pod \"openstack-operator-index-wf5gb\" (UID: \"9f4bfd9b-9056-4096-804c-5cdf8a6a29d4\") " pod="openstack-operators/openstack-operator-index-wf5gb" Dec 05 06:07:41 crc kubenswrapper[4742]: I1205 06:07:41.119738 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4k4xj\" (UniqueName: \"kubernetes.io/projected/9f4bfd9b-9056-4096-804c-5cdf8a6a29d4-kube-api-access-4k4xj\") pod \"openstack-operator-index-wf5gb\" (UID: \"9f4bfd9b-9056-4096-804c-5cdf8a6a29d4\") " pod="openstack-operators/openstack-operator-index-wf5gb" Dec 05 06:07:41 crc kubenswrapper[4742]: I1205 06:07:41.174856 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-wf5gb" Dec 05 06:07:41 crc kubenswrapper[4742]: I1205 06:07:41.411655 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-wf5gb"] Dec 05 06:07:41 crc kubenswrapper[4742]: I1205 06:07:41.418188 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jk4jz" event={"ID":"fa3082f0-b278-4bd6-b457-8a06652d93f4","Type":"ContainerStarted","Data":"aefe54d8b495ed42e9766da095f0d2f9162499ed2815a41939e2351b8fe639c7"} Dec 05 06:07:41 crc kubenswrapper[4742]: I1205 06:07:41.418654 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-jk4jz" podUID="fa3082f0-b278-4bd6-b457-8a06652d93f4" containerName="registry-server" containerID="cri-o://aefe54d8b495ed42e9766da095f0d2f9162499ed2815a41939e2351b8fe639c7" gracePeriod=2 Dec 05 06:07:41 crc kubenswrapper[4742]: I1205 06:07:41.448038 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-jk4jz" podStartSLOduration=1.778179453 podStartE2EDuration="5.448015465s" podCreationTimestamp="2025-12-05 06:07:36 +0000 UTC" firstStartedPulling="2025-12-05 06:07:36.747684028 +0000 UTC m=+932.659819090" lastFinishedPulling="2025-12-05 06:07:40.41752001 +0000 UTC m=+936.329655102" observedRunningTime="2025-12-05 06:07:41.445201681 +0000 UTC m=+937.357336763" watchObservedRunningTime="2025-12-05 06:07:41.448015465 +0000 UTC m=+937.360150537" Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.399232 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-jk4jz" Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.425954 4742 generic.go:334] "Generic (PLEG): container finished" podID="fa3082f0-b278-4bd6-b457-8a06652d93f4" containerID="aefe54d8b495ed42e9766da095f0d2f9162499ed2815a41939e2351b8fe639c7" exitCode=0 Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.426015 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jk4jz" event={"ID":"fa3082f0-b278-4bd6-b457-8a06652d93f4","Type":"ContainerDied","Data":"aefe54d8b495ed42e9766da095f0d2f9162499ed2815a41939e2351b8fe639c7"} Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.426047 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jk4jz" event={"ID":"fa3082f0-b278-4bd6-b457-8a06652d93f4","Type":"ContainerDied","Data":"97748c066f11ccdc82463df270e791e55c86e07836d5e56b5b737022e1840618"} Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.426085 4742 scope.go:117] "RemoveContainer" containerID="aefe54d8b495ed42e9766da095f0d2f9162499ed2815a41939e2351b8fe639c7" Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.426194 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jk4jz" Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.437350 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wf5gb" event={"ID":"9f4bfd9b-9056-4096-804c-5cdf8a6a29d4","Type":"ContainerStarted","Data":"19ec3cce2b02e7098fb7b41b9cc6279e9f97588b892a7d26dd93a56bdc15d54d"} Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.437399 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-wf5gb" event={"ID":"9f4bfd9b-9056-4096-804c-5cdf8a6a29d4","Type":"ContainerStarted","Data":"1f7a047566b26549d15765c503239a80128836cf887d23b2da7ba541eff665d8"} Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.448793 4742 scope.go:117] "RemoveContainer" containerID="aefe54d8b495ed42e9766da095f0d2f9162499ed2815a41939e2351b8fe639c7" Dec 05 06:07:42 crc kubenswrapper[4742]: E1205 06:07:42.450082 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aefe54d8b495ed42e9766da095f0d2f9162499ed2815a41939e2351b8fe639c7\": container with ID starting with aefe54d8b495ed42e9766da095f0d2f9162499ed2815a41939e2351b8fe639c7 not found: ID does not exist" containerID="aefe54d8b495ed42e9766da095f0d2f9162499ed2815a41939e2351b8fe639c7" Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.450118 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aefe54d8b495ed42e9766da095f0d2f9162499ed2815a41939e2351b8fe639c7"} err="failed to get container status \"aefe54d8b495ed42e9766da095f0d2f9162499ed2815a41939e2351b8fe639c7\": rpc error: code = NotFound desc = could not find container \"aefe54d8b495ed42e9766da095f0d2f9162499ed2815a41939e2351b8fe639c7\": container with ID starting with aefe54d8b495ed42e9766da095f0d2f9162499ed2815a41939e2351b8fe639c7 not found: ID does not exist" Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.466530 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-wf5gb" podStartSLOduration=2.24138818 podStartE2EDuration="2.466512784s" podCreationTimestamp="2025-12-05 
06:07:40 +0000 UTC" firstStartedPulling="2025-12-05 06:07:41.425227875 +0000 UTC m=+937.337362977" lastFinishedPulling="2025-12-05 06:07:41.650352509 +0000 UTC m=+937.562487581" observedRunningTime="2025-12-05 06:07:42.463206186 +0000 UTC m=+938.375341248" watchObservedRunningTime="2025-12-05 06:07:42.466512784 +0000 UTC m=+938.378647856" Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.517599 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b5tk\" (UniqueName: \"kubernetes.io/projected/fa3082f0-b278-4bd6-b457-8a06652d93f4-kube-api-access-7b5tk\") pod \"fa3082f0-b278-4bd6-b457-8a06652d93f4\" (UID: \"fa3082f0-b278-4bd6-b457-8a06652d93f4\") " Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.523003 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa3082f0-b278-4bd6-b457-8a06652d93f4-kube-api-access-7b5tk" (OuterVolumeSpecName: "kube-api-access-7b5tk") pod "fa3082f0-b278-4bd6-b457-8a06652d93f4" (UID: "fa3082f0-b278-4bd6-b457-8a06652d93f4"). InnerVolumeSpecName "kube-api-access-7b5tk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.603709 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2kq4k"] Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.603934 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2kq4k" podUID="edd92f41-0e9c-47a1-86ed-eb3fefd43936" containerName="registry-server" containerID="cri-o://6e684ae580f405ace68c908f55ce200337272d3fdf8e7406e288c2c3c6f9bdf5" gracePeriod=2 Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.618664 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b5tk\" (UniqueName: \"kubernetes.io/projected/fa3082f0-b278-4bd6-b457-8a06652d93f4-kube-api-access-7b5tk\") on node \"crc\" DevicePath \"\"" Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.762350 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jk4jz"] Dec 05 06:07:42 crc kubenswrapper[4742]: I1205 06:07:42.766600 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-jk4jz"] Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.025848 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.226123 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edd92f41-0e9c-47a1-86ed-eb3fefd43936-catalog-content\") pod \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\" (UID: \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\") " Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.226250 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j78sj\" (UniqueName: \"kubernetes.io/projected/edd92f41-0e9c-47a1-86ed-eb3fefd43936-kube-api-access-j78sj\") pod \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\" (UID: \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\") " Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.226309 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edd92f41-0e9c-47a1-86ed-eb3fefd43936-utilities\") pod \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\" (UID: \"edd92f41-0e9c-47a1-86ed-eb3fefd43936\") " Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.228215 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edd92f41-0e9c-47a1-86ed-eb3fefd43936-utilities" (OuterVolumeSpecName: "utilities") pod "edd92f41-0e9c-47a1-86ed-eb3fefd43936" (UID: "edd92f41-0e9c-47a1-86ed-eb3fefd43936"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.237394 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edd92f41-0e9c-47a1-86ed-eb3fefd43936-kube-api-access-j78sj" (OuterVolumeSpecName: "kube-api-access-j78sj") pod "edd92f41-0e9c-47a1-86ed-eb3fefd43936" (UID: "edd92f41-0e9c-47a1-86ed-eb3fefd43936"). InnerVolumeSpecName "kube-api-access-j78sj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.312283 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edd92f41-0e9c-47a1-86ed-eb3fefd43936-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "edd92f41-0e9c-47a1-86ed-eb3fefd43936" (UID: "edd92f41-0e9c-47a1-86ed-eb3fefd43936"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.327875 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edd92f41-0e9c-47a1-86ed-eb3fefd43936-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.327905 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edd92f41-0e9c-47a1-86ed-eb3fefd43936-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.327920 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j78sj\" (UniqueName: \"kubernetes.io/projected/edd92f41-0e9c-47a1-86ed-eb3fefd43936-kube-api-access-j78sj\") on node \"crc\" DevicePath \"\"" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.455903 4742 generic.go:334] "Generic (PLEG): container finished" podID="edd92f41-0e9c-47a1-86ed-eb3fefd43936" containerID="6e684ae580f405ace68c908f55ce200337272d3fdf8e7406e288c2c3c6f9bdf5" exitCode=0 Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.455979 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2kq4k" event={"ID":"edd92f41-0e9c-47a1-86ed-eb3fefd43936","Type":"ContainerDied","Data":"6e684ae580f405ace68c908f55ce200337272d3fdf8e7406e288c2c3c6f9bdf5"} Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.456021 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2kq4k" event={"ID":"edd92f41-0e9c-47a1-86ed-eb3fefd43936","Type":"ContainerDied","Data":"c36bef3ed62d8c4866c967f652192743708af62c4d333d9ea93ea017813f4a0c"} Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.456038 4742 scope.go:117] "RemoveContainer" containerID="6e684ae580f405ace68c908f55ce200337272d3fdf8e7406e288c2c3c6f9bdf5" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.456089 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2kq4k" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.487524 4742 scope.go:117] "RemoveContainer" containerID="4adaec6ad1e250ccf5f13cff39a9cbeee3e8dc18d32d73646846cacff7503cc2" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.515191 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2kq4k"] Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.522599 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2kq4k"] Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.526430 4742 scope.go:117] "RemoveContainer" containerID="80ef567138d161facb0756c109f62bd19cf413c90ae3cb14c8c77f60a8ead3f8" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.556699 4742 scope.go:117] "RemoveContainer" containerID="6e684ae580f405ace68c908f55ce200337272d3fdf8e7406e288c2c3c6f9bdf5" Dec 05 06:07:43 crc kubenswrapper[4742]: E1205 06:07:43.557183 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e684ae580f405ace68c908f55ce200337272d3fdf8e7406e288c2c3c6f9bdf5\": container with ID starting with 6e684ae580f405ace68c908f55ce200337272d3fdf8e7406e288c2c3c6f9bdf5 not found: ID does not exist" containerID="6e684ae580f405ace68c908f55ce200337272d3fdf8e7406e288c2c3c6f9bdf5" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.557236 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e684ae580f405ace68c908f55ce200337272d3fdf8e7406e288c2c3c6f9bdf5"} err="failed to get container status \"6e684ae580f405ace68c908f55ce200337272d3fdf8e7406e288c2c3c6f9bdf5\": rpc error: code = NotFound desc = could not find container \"6e684ae580f405ace68c908f55ce200337272d3fdf8e7406e288c2c3c6f9bdf5\": container with ID starting with 6e684ae580f405ace68c908f55ce200337272d3fdf8e7406e288c2c3c6f9bdf5 not found: ID does not exist" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.557271 4742 scope.go:117] "RemoveContainer" containerID="4adaec6ad1e250ccf5f13cff39a9cbeee3e8dc18d32d73646846cacff7503cc2" Dec 05 06:07:43 crc kubenswrapper[4742]: E1205 06:07:43.557758 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4adaec6ad1e250ccf5f13cff39a9cbeee3e8dc18d32d73646846cacff7503cc2\": container with ID starting with 4adaec6ad1e250ccf5f13cff39a9cbeee3e8dc18d32d73646846cacff7503cc2 not found: ID does not exist" containerID="4adaec6ad1e250ccf5f13cff39a9cbeee3e8dc18d32d73646846cacff7503cc2" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.557825 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4adaec6ad1e250ccf5f13cff39a9cbeee3e8dc18d32d73646846cacff7503cc2"} err="failed to get container status \"4adaec6ad1e250ccf5f13cff39a9cbeee3e8dc18d32d73646846cacff7503cc2\": rpc error: code = NotFound desc = could not find container \"4adaec6ad1e250ccf5f13cff39a9cbeee3e8dc18d32d73646846cacff7503cc2\": container with ID starting with 4adaec6ad1e250ccf5f13cff39a9cbeee3e8dc18d32d73646846cacff7503cc2 not found: ID does not exist" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.557869 4742 scope.go:117] "RemoveContainer" containerID="80ef567138d161facb0756c109f62bd19cf413c90ae3cb14c8c77f60a8ead3f8" Dec 05 06:07:43 crc kubenswrapper[4742]: E1205 06:07:43.558268 4742 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"80ef567138d161facb0756c109f62bd19cf413c90ae3cb14c8c77f60a8ead3f8\": container with ID starting with 80ef567138d161facb0756c109f62bd19cf413c90ae3cb14c8c77f60a8ead3f8 not found: ID does not exist" containerID="80ef567138d161facb0756c109f62bd19cf413c90ae3cb14c8c77f60a8ead3f8" Dec 05 06:07:43 crc kubenswrapper[4742]: I1205 06:07:43.558350 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80ef567138d161facb0756c109f62bd19cf413c90ae3cb14c8c77f60a8ead3f8"} err="failed to get container status \"80ef567138d161facb0756c109f62bd19cf413c90ae3cb14c8c77f60a8ead3f8\": rpc error: code = NotFound desc = could not find container \"80ef567138d161facb0756c109f62bd19cf413c90ae3cb14c8c77f60a8ead3f8\": container with ID starting with 80ef567138d161facb0756c109f62bd19cf413c90ae3cb14c8c77f60a8ead3f8 not found: ID does not exist" Dec 05 06:07:44 crc kubenswrapper[4742]: I1205 06:07:44.406276 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edd92f41-0e9c-47a1-86ed-eb3fefd43936" path="/var/lib/kubelet/pods/edd92f41-0e9c-47a1-86ed-eb3fefd43936/volumes" Dec 05 06:07:44 crc kubenswrapper[4742]: I1205 06:07:44.407551 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa3082f0-b278-4bd6-b457-8a06652d93f4" path="/var/lib/kubelet/pods/fa3082f0-b278-4bd6-b457-8a06652d93f4/volumes" Dec 05 06:07:46 crc kubenswrapper[4742]: I1205 06:07:46.671437 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:07:46 crc kubenswrapper[4742]: I1205 06:07:46.671793 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:07:51 crc kubenswrapper[4742]: I1205 06:07:51.175842 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-wf5gb" Dec 05 06:07:51 crc kubenswrapper[4742]: I1205 06:07:51.176237 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-wf5gb" Dec 05 06:07:51 crc kubenswrapper[4742]: I1205 06:07:51.212737 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-wf5gb" Dec 05 06:07:51 crc kubenswrapper[4742]: I1205 06:07:51.541101 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-wf5gb" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.267128 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46"] Dec 05 06:07:54 crc kubenswrapper[4742]: E1205 06:07:54.267635 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edd92f41-0e9c-47a1-86ed-eb3fefd43936" containerName="extract-content" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.267650 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="edd92f41-0e9c-47a1-86ed-eb3fefd43936" 
containerName="extract-content" Dec 05 06:07:54 crc kubenswrapper[4742]: E1205 06:07:54.267662 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa3082f0-b278-4bd6-b457-8a06652d93f4" containerName="registry-server" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.267670 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa3082f0-b278-4bd6-b457-8a06652d93f4" containerName="registry-server" Dec 05 06:07:54 crc kubenswrapper[4742]: E1205 06:07:54.267687 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edd92f41-0e9c-47a1-86ed-eb3fefd43936" containerName="extract-utilities" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.267695 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="edd92f41-0e9c-47a1-86ed-eb3fefd43936" containerName="extract-utilities" Dec 05 06:07:54 crc kubenswrapper[4742]: E1205 06:07:54.267716 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edd92f41-0e9c-47a1-86ed-eb3fefd43936" containerName="registry-server" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.267724 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="edd92f41-0e9c-47a1-86ed-eb3fefd43936" containerName="registry-server" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.267844 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="edd92f41-0e9c-47a1-86ed-eb3fefd43936" containerName="registry-server" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.267863 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa3082f0-b278-4bd6-b457-8a06652d93f4" containerName="registry-server" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.268826 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.283012 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46"] Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.285236 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-fzd6r" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.387989 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8a14d1cc-1601-458f-97f6-01c3d6a95510-util\") pod \"7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46\" (UID: \"8a14d1cc-1601-458f-97f6-01c3d6a95510\") " pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.388102 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgm8z\" (UniqueName: \"kubernetes.io/projected/8a14d1cc-1601-458f-97f6-01c3d6a95510-kube-api-access-wgm8z\") pod \"7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46\" (UID: \"8a14d1cc-1601-458f-97f6-01c3d6a95510\") " pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.388168 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a14d1cc-1601-458f-97f6-01c3d6a95510-bundle\") pod \"7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46\" 
(UID: \"8a14d1cc-1601-458f-97f6-01c3d6a95510\") " pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.490508 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8a14d1cc-1601-458f-97f6-01c3d6a95510-util\") pod \"7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46\" (UID: \"8a14d1cc-1601-458f-97f6-01c3d6a95510\") " pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.490629 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgm8z\" (UniqueName: \"kubernetes.io/projected/8a14d1cc-1601-458f-97f6-01c3d6a95510-kube-api-access-wgm8z\") pod \"7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46\" (UID: \"8a14d1cc-1601-458f-97f6-01c3d6a95510\") " pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.490714 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a14d1cc-1601-458f-97f6-01c3d6a95510-bundle\") pod \"7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46\" (UID: \"8a14d1cc-1601-458f-97f6-01c3d6a95510\") " pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.491169 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8a14d1cc-1601-458f-97f6-01c3d6a95510-util\") pod \"7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46\" (UID: \"8a14d1cc-1601-458f-97f6-01c3d6a95510\") " pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.491380 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a14d1cc-1601-458f-97f6-01c3d6a95510-bundle\") pod \"7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46\" (UID: \"8a14d1cc-1601-458f-97f6-01c3d6a95510\") " pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.522851 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgm8z\" (UniqueName: \"kubernetes.io/projected/8a14d1cc-1601-458f-97f6-01c3d6a95510-kube-api-access-wgm8z\") pod \"7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46\" (UID: \"8a14d1cc-1601-458f-97f6-01c3d6a95510\") " pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.590287 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" Dec 05 06:07:54 crc kubenswrapper[4742]: I1205 06:07:54.856136 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46"] Dec 05 06:07:55 crc kubenswrapper[4742]: I1205 06:07:55.543228 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" event={"ID":"8a14d1cc-1601-458f-97f6-01c3d6a95510","Type":"ContainerStarted","Data":"ce8a0ea80f36534fe229fe005b8ab0c8de8c7e70708fa2d16465262c8044c2ff"} Dec 05 06:07:56 crc kubenswrapper[4742]: I1205 06:07:56.557278 4742 generic.go:334] "Generic (PLEG): container finished" podID="8a14d1cc-1601-458f-97f6-01c3d6a95510" containerID="6a27abbaa0d5208e4e38e537a311827c42de43fa47c96b4decca47018dda1163" exitCode=0 Dec 05 06:07:56 crc kubenswrapper[4742]: I1205 06:07:56.557367 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" event={"ID":"8a14d1cc-1601-458f-97f6-01c3d6a95510","Type":"ContainerDied","Data":"6a27abbaa0d5208e4e38e537a311827c42de43fa47c96b4decca47018dda1163"} Dec 05 06:07:57 crc kubenswrapper[4742]: I1205 06:07:57.570493 4742 generic.go:334] "Generic (PLEG): container finished" podID="8a14d1cc-1601-458f-97f6-01c3d6a95510" containerID="8333e601972555e39dbe2cfe4ee1cfc1627b43b6d9988c6204607ff07a5d8ccc" exitCode=0 Dec 05 06:07:57 crc kubenswrapper[4742]: I1205 06:07:57.570591 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" event={"ID":"8a14d1cc-1601-458f-97f6-01c3d6a95510","Type":"ContainerDied","Data":"8333e601972555e39dbe2cfe4ee1cfc1627b43b6d9988c6204607ff07a5d8ccc"} Dec 05 06:07:58 crc kubenswrapper[4742]: I1205 06:07:58.583522 4742 generic.go:334] "Generic (PLEG): container finished" podID="8a14d1cc-1601-458f-97f6-01c3d6a95510" containerID="fdbd98344a1eb202f38903bf46fc3c49999ba4b4585ad21ee7206493b903647d" exitCode=0 Dec 05 06:07:58 crc kubenswrapper[4742]: I1205 06:07:58.583607 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" event={"ID":"8a14d1cc-1601-458f-97f6-01c3d6a95510","Type":"ContainerDied","Data":"fdbd98344a1eb202f38903bf46fc3c49999ba4b4585ad21ee7206493b903647d"} Dec 05 06:07:59 crc kubenswrapper[4742]: I1205 06:07:59.872198 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" Dec 05 06:08:00 crc kubenswrapper[4742]: I1205 06:08:00.067486 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgm8z\" (UniqueName: \"kubernetes.io/projected/8a14d1cc-1601-458f-97f6-01c3d6a95510-kube-api-access-wgm8z\") pod \"8a14d1cc-1601-458f-97f6-01c3d6a95510\" (UID: \"8a14d1cc-1601-458f-97f6-01c3d6a95510\") " Dec 05 06:08:00 crc kubenswrapper[4742]: I1205 06:08:00.067561 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a14d1cc-1601-458f-97f6-01c3d6a95510-bundle\") pod \"8a14d1cc-1601-458f-97f6-01c3d6a95510\" (UID: \"8a14d1cc-1601-458f-97f6-01c3d6a95510\") " Dec 05 06:08:00 crc kubenswrapper[4742]: I1205 06:08:00.067621 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8a14d1cc-1601-458f-97f6-01c3d6a95510-util\") pod \"8a14d1cc-1601-458f-97f6-01c3d6a95510\" (UID: \"8a14d1cc-1601-458f-97f6-01c3d6a95510\") " Dec 05 06:08:00 crc kubenswrapper[4742]: I1205 06:08:00.068364 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a14d1cc-1601-458f-97f6-01c3d6a95510-bundle" (OuterVolumeSpecName: "bundle") pod "8a14d1cc-1601-458f-97f6-01c3d6a95510" (UID: "8a14d1cc-1601-458f-97f6-01c3d6a95510"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:08:00 crc kubenswrapper[4742]: I1205 06:08:00.073637 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a14d1cc-1601-458f-97f6-01c3d6a95510-kube-api-access-wgm8z" (OuterVolumeSpecName: "kube-api-access-wgm8z") pod "8a14d1cc-1601-458f-97f6-01c3d6a95510" (UID: "8a14d1cc-1601-458f-97f6-01c3d6a95510"). InnerVolumeSpecName "kube-api-access-wgm8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:08:00 crc kubenswrapper[4742]: I1205 06:08:00.082953 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a14d1cc-1601-458f-97f6-01c3d6a95510-util" (OuterVolumeSpecName: "util") pod "8a14d1cc-1601-458f-97f6-01c3d6a95510" (UID: "8a14d1cc-1601-458f-97f6-01c3d6a95510"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:08:00 crc kubenswrapper[4742]: I1205 06:08:00.169424 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgm8z\" (UniqueName: \"kubernetes.io/projected/8a14d1cc-1601-458f-97f6-01c3d6a95510-kube-api-access-wgm8z\") on node \"crc\" DevicePath \"\"" Dec 05 06:08:00 crc kubenswrapper[4742]: I1205 06:08:00.169467 4742 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8a14d1cc-1601-458f-97f6-01c3d6a95510-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:08:00 crc kubenswrapper[4742]: I1205 06:08:00.169478 4742 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8a14d1cc-1601-458f-97f6-01c3d6a95510-util\") on node \"crc\" DevicePath \"\"" Dec 05 06:08:00 crc kubenswrapper[4742]: I1205 06:08:00.601514 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" event={"ID":"8a14d1cc-1601-458f-97f6-01c3d6a95510","Type":"ContainerDied","Data":"ce8a0ea80f36534fe229fe005b8ab0c8de8c7e70708fa2d16465262c8044c2ff"} Dec 05 06:08:00 crc kubenswrapper[4742]: I1205 06:08:00.601618 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce8a0ea80f36534fe229fe005b8ab0c8de8c7e70708fa2d16465262c8044c2ff" Dec 05 06:08:00 crc kubenswrapper[4742]: I1205 06:08:00.601593 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46" Dec 05 06:08:04 crc kubenswrapper[4742]: I1205 06:08:04.820644 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bcrbj"] Dec 05 06:08:04 crc kubenswrapper[4742]: E1205 06:08:04.821185 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a14d1cc-1601-458f-97f6-01c3d6a95510" containerName="util" Dec 05 06:08:04 crc kubenswrapper[4742]: I1205 06:08:04.821199 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a14d1cc-1601-458f-97f6-01c3d6a95510" containerName="util" Dec 05 06:08:04 crc kubenswrapper[4742]: E1205 06:08:04.821218 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a14d1cc-1601-458f-97f6-01c3d6a95510" containerName="pull" Dec 05 06:08:04 crc kubenswrapper[4742]: I1205 06:08:04.821227 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a14d1cc-1601-458f-97f6-01c3d6a95510" containerName="pull" Dec 05 06:08:04 crc kubenswrapper[4742]: E1205 06:08:04.821244 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a14d1cc-1601-458f-97f6-01c3d6a95510" containerName="extract" Dec 05 06:08:04 crc kubenswrapper[4742]: I1205 06:08:04.821253 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a14d1cc-1601-458f-97f6-01c3d6a95510" containerName="extract" Dec 05 06:08:04 crc kubenswrapper[4742]: I1205 06:08:04.821393 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a14d1cc-1601-458f-97f6-01c3d6a95510" containerName="extract" Dec 05 06:08:04 crc kubenswrapper[4742]: I1205 06:08:04.822522 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:04 crc kubenswrapper[4742]: I1205 06:08:04.834539 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bcrbj"] Dec 05 06:08:04 crc kubenswrapper[4742]: I1205 06:08:04.936749 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8576918-5101-4cce-a010-21d17c783f03-utilities\") pod \"redhat-marketplace-bcrbj\" (UID: \"c8576918-5101-4cce-a010-21d17c783f03\") " pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:04 crc kubenswrapper[4742]: I1205 06:08:04.936806 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8p2z\" (UniqueName: \"kubernetes.io/projected/c8576918-5101-4cce-a010-21d17c783f03-kube-api-access-b8p2z\") pod \"redhat-marketplace-bcrbj\" (UID: \"c8576918-5101-4cce-a010-21d17c783f03\") " pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:04 crc kubenswrapper[4742]: I1205 06:08:04.936966 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8576918-5101-4cce-a010-21d17c783f03-catalog-content\") pod \"redhat-marketplace-bcrbj\" (UID: \"c8576918-5101-4cce-a010-21d17c783f03\") " pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.038327 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8576918-5101-4cce-a010-21d17c783f03-utilities\") pod \"redhat-marketplace-bcrbj\" (UID: \"c8576918-5101-4cce-a010-21d17c783f03\") " pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.038423 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8p2z\" (UniqueName: \"kubernetes.io/projected/c8576918-5101-4cce-a010-21d17c783f03-kube-api-access-b8p2z\") pod \"redhat-marketplace-bcrbj\" (UID: \"c8576918-5101-4cce-a010-21d17c783f03\") " pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.038456 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8576918-5101-4cce-a010-21d17c783f03-catalog-content\") pod \"redhat-marketplace-bcrbj\" (UID: \"c8576918-5101-4cce-a010-21d17c783f03\") " pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.038966 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8576918-5101-4cce-a010-21d17c783f03-utilities\") pod \"redhat-marketplace-bcrbj\" (UID: \"c8576918-5101-4cce-a010-21d17c783f03\") " pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.039089 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8576918-5101-4cce-a010-21d17c783f03-catalog-content\") pod \"redhat-marketplace-bcrbj\" (UID: \"c8576918-5101-4cce-a010-21d17c783f03\") " pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.061401 4742 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-b8p2z\" (UniqueName: \"kubernetes.io/projected/c8576918-5101-4cce-a010-21d17c783f03-kube-api-access-b8p2z\") pod \"redhat-marketplace-bcrbj\" (UID: \"c8576918-5101-4cce-a010-21d17c783f03\") " pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.146119 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.369893 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-554dbdfbd5-vlt2c"] Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.370867 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-554dbdfbd5-vlt2c" Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.375569 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-4qdkb" Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.407209 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-554dbdfbd5-vlt2c"] Dec 05 06:08:05 crc kubenswrapper[4742]: W1205 06:08:05.463210 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8576918_5101_4cce_a010_21d17c783f03.slice/crio-6b05280e1097192afba4187eefaf620dacbb87a626151dfbe615b38cc88a4512 WatchSource:0}: Error finding container 6b05280e1097192afba4187eefaf620dacbb87a626151dfbe615b38cc88a4512: Status 404 returned error can't find the container with id 6b05280e1097192afba4187eefaf620dacbb87a626151dfbe615b38cc88a4512 Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.466351 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bcrbj"] Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.544480 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjs2z\" (UniqueName: \"kubernetes.io/projected/d3a0217e-be86-4205-a03c-fc3a3c603ebd-kube-api-access-gjs2z\") pod \"openstack-operator-controller-operator-554dbdfbd5-vlt2c\" (UID: \"d3a0217e-be86-4205-a03c-fc3a3c603ebd\") " pod="openstack-operators/openstack-operator-controller-operator-554dbdfbd5-vlt2c" Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.637805 4742 generic.go:334] "Generic (PLEG): container finished" podID="c8576918-5101-4cce-a010-21d17c783f03" containerID="10f1c63aa46e5d656dea32fcee1de434cf6b00c29a086177f0dde4e83bc11d87" exitCode=0 Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.637846 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bcrbj" event={"ID":"c8576918-5101-4cce-a010-21d17c783f03","Type":"ContainerDied","Data":"10f1c63aa46e5d656dea32fcee1de434cf6b00c29a086177f0dde4e83bc11d87"} Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.637871 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bcrbj" event={"ID":"c8576918-5101-4cce-a010-21d17c783f03","Type":"ContainerStarted","Data":"6b05280e1097192afba4187eefaf620dacbb87a626151dfbe615b38cc88a4512"} Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.646431 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-gjs2z\" (UniqueName: \"kubernetes.io/projected/d3a0217e-be86-4205-a03c-fc3a3c603ebd-kube-api-access-gjs2z\") pod \"openstack-operator-controller-operator-554dbdfbd5-vlt2c\" (UID: \"d3a0217e-be86-4205-a03c-fc3a3c603ebd\") " pod="openstack-operators/openstack-operator-controller-operator-554dbdfbd5-vlt2c" Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.670361 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjs2z\" (UniqueName: \"kubernetes.io/projected/d3a0217e-be86-4205-a03c-fc3a3c603ebd-kube-api-access-gjs2z\") pod \"openstack-operator-controller-operator-554dbdfbd5-vlt2c\" (UID: \"d3a0217e-be86-4205-a03c-fc3a3c603ebd\") " pod="openstack-operators/openstack-operator-controller-operator-554dbdfbd5-vlt2c" Dec 05 06:08:05 crc kubenswrapper[4742]: I1205 06:08:05.702730 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-554dbdfbd5-vlt2c" Dec 05 06:08:06 crc kubenswrapper[4742]: I1205 06:08:06.125441 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-554dbdfbd5-vlt2c"] Dec 05 06:08:06 crc kubenswrapper[4742]: W1205 06:08:06.134074 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3a0217e_be86_4205_a03c_fc3a3c603ebd.slice/crio-b6da3b81a04302071f3505fcd01d83ebf3b0421a4916ccf18bae151c437ad1eb WatchSource:0}: Error finding container b6da3b81a04302071f3505fcd01d83ebf3b0421a4916ccf18bae151c437ad1eb: Status 404 returned error can't find the container with id b6da3b81a04302071f3505fcd01d83ebf3b0421a4916ccf18bae151c437ad1eb Dec 05 06:08:06 crc kubenswrapper[4742]: I1205 06:08:06.644816 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-554dbdfbd5-vlt2c" event={"ID":"d3a0217e-be86-4205-a03c-fc3a3c603ebd","Type":"ContainerStarted","Data":"b6da3b81a04302071f3505fcd01d83ebf3b0421a4916ccf18bae151c437ad1eb"} Dec 05 06:08:06 crc kubenswrapper[4742]: I1205 06:08:06.647251 4742 generic.go:334] "Generic (PLEG): container finished" podID="c8576918-5101-4cce-a010-21d17c783f03" containerID="b063d869a2dcd0730e936499bda30b0fcb0fad7d68168cc8e8c095874234c97c" exitCode=0 Dec 05 06:08:06 crc kubenswrapper[4742]: I1205 06:08:06.647280 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bcrbj" event={"ID":"c8576918-5101-4cce-a010-21d17c783f03","Type":"ContainerDied","Data":"b063d869a2dcd0730e936499bda30b0fcb0fad7d68168cc8e8c095874234c97c"} Dec 05 06:08:07 crc kubenswrapper[4742]: I1205 06:08:07.653766 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bcrbj" event={"ID":"c8576918-5101-4cce-a010-21d17c783f03","Type":"ContainerStarted","Data":"cede2ecf2c9c2aa52ac28b60fafd77482af288cc3563f105d2b4748b4153bf73"} Dec 05 06:08:10 crc kubenswrapper[4742]: I1205 06:08:10.680206 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-554dbdfbd5-vlt2c" event={"ID":"d3a0217e-be86-4205-a03c-fc3a3c603ebd","Type":"ContainerStarted","Data":"9f43aab23258c862735fa45ced1486bf29ba8e11117328990d5b36c0933061dd"} Dec 05 06:08:10 crc kubenswrapper[4742]: I1205 06:08:10.680816 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/openstack-operator-controller-operator-554dbdfbd5-vlt2c" Dec 05 06:08:10 crc kubenswrapper[4742]: I1205 06:08:10.719968 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-554dbdfbd5-vlt2c" podStartSLOduration=1.664392033 podStartE2EDuration="5.719915672s" podCreationTimestamp="2025-12-05 06:08:05 +0000 UTC" firstStartedPulling="2025-12-05 06:08:06.136226991 +0000 UTC m=+962.048362053" lastFinishedPulling="2025-12-05 06:08:10.19175063 +0000 UTC m=+966.103885692" observedRunningTime="2025-12-05 06:08:10.719626375 +0000 UTC m=+966.631761477" watchObservedRunningTime="2025-12-05 06:08:10.719915672 +0000 UTC m=+966.632050784" Dec 05 06:08:10 crc kubenswrapper[4742]: I1205 06:08:10.729554 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bcrbj" podStartSLOduration=5.326519241 podStartE2EDuration="6.729531126s" podCreationTimestamp="2025-12-05 06:08:04 +0000 UTC" firstStartedPulling="2025-12-05 06:08:05.639130876 +0000 UTC m=+961.551265948" lastFinishedPulling="2025-12-05 06:08:07.042142771 +0000 UTC m=+962.954277833" observedRunningTime="2025-12-05 06:08:07.682830511 +0000 UTC m=+963.594965573" watchObservedRunningTime="2025-12-05 06:08:10.729531126 +0000 UTC m=+966.641666228" Dec 05 06:08:15 crc kubenswrapper[4742]: I1205 06:08:15.147124 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:15 crc kubenswrapper[4742]: I1205 06:08:15.147718 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:15 crc kubenswrapper[4742]: I1205 06:08:15.207729 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:15 crc kubenswrapper[4742]: I1205 06:08:15.707094 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-554dbdfbd5-vlt2c" Dec 05 06:08:15 crc kubenswrapper[4742]: I1205 06:08:15.796837 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:16 crc kubenswrapper[4742]: I1205 06:08:16.671678 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:08:16 crc kubenswrapper[4742]: I1205 06:08:16.671764 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:08:16 crc kubenswrapper[4742]: I1205 06:08:16.671832 4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 06:08:16 crc kubenswrapper[4742]: I1205 06:08:16.672714 4742 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"6c6428d248edc5bf49d6ecdb41f4e3135ccfb37d799e38e42171af4f0f46c67b"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:08:16 crc kubenswrapper[4742]: I1205 06:08:16.672855 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://6c6428d248edc5bf49d6ecdb41f4e3135ccfb37d799e38e42171af4f0f46c67b" gracePeriod=600 Dec 05 06:08:17 crc kubenswrapper[4742]: I1205 06:08:17.604680 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bcrbj"] Dec 05 06:08:17 crc kubenswrapper[4742]: I1205 06:08:17.745310 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="6c6428d248edc5bf49d6ecdb41f4e3135ccfb37d799e38e42171af4f0f46c67b" exitCode=0 Dec 05 06:08:17 crc kubenswrapper[4742]: I1205 06:08:17.745549 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bcrbj" podUID="c8576918-5101-4cce-a010-21d17c783f03" containerName="registry-server" containerID="cri-o://cede2ecf2c9c2aa52ac28b60fafd77482af288cc3563f105d2b4748b4153bf73" gracePeriod=2 Dec 05 06:08:17 crc kubenswrapper[4742]: I1205 06:08:17.746158 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"6c6428d248edc5bf49d6ecdb41f4e3135ccfb37d799e38e42171af4f0f46c67b"} Dec 05 06:08:17 crc kubenswrapper[4742]: I1205 06:08:17.746244 4742 scope.go:117] "RemoveContainer" containerID="8712ed854f5ba4470f6a7971cc87ba22a52ea34afd6f25ae35634401b00bf15a" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.143249 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.233816 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8p2z\" (UniqueName: \"kubernetes.io/projected/c8576918-5101-4cce-a010-21d17c783f03-kube-api-access-b8p2z\") pod \"c8576918-5101-4cce-a010-21d17c783f03\" (UID: \"c8576918-5101-4cce-a010-21d17c783f03\") " Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.233882 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8576918-5101-4cce-a010-21d17c783f03-catalog-content\") pod \"c8576918-5101-4cce-a010-21d17c783f03\" (UID: \"c8576918-5101-4cce-a010-21d17c783f03\") " Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.233988 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8576918-5101-4cce-a010-21d17c783f03-utilities\") pod \"c8576918-5101-4cce-a010-21d17c783f03\" (UID: \"c8576918-5101-4cce-a010-21d17c783f03\") " Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.235172 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8576918-5101-4cce-a010-21d17c783f03-utilities" (OuterVolumeSpecName: "utilities") pod "c8576918-5101-4cce-a010-21d17c783f03" (UID: "c8576918-5101-4cce-a010-21d17c783f03"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.248231 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8576918-5101-4cce-a010-21d17c783f03-kube-api-access-b8p2z" (OuterVolumeSpecName: "kube-api-access-b8p2z") pod "c8576918-5101-4cce-a010-21d17c783f03" (UID: "c8576918-5101-4cce-a010-21d17c783f03"). InnerVolumeSpecName "kube-api-access-b8p2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.250298 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8576918-5101-4cce-a010-21d17c783f03-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c8576918-5101-4cce-a010-21d17c783f03" (UID: "c8576918-5101-4cce-a010-21d17c783f03"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.336267 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8576918-5101-4cce-a010-21d17c783f03-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.336323 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8576918-5101-4cce-a010-21d17c783f03-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.336344 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8p2z\" (UniqueName: \"kubernetes.io/projected/c8576918-5101-4cce-a010-21d17c783f03-kube-api-access-b8p2z\") on node \"crc\" DevicePath \"\"" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.754103 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"310cca7f57f78facafa7379a55640dd8bda7651e6fa10b0fa067a67c3dc118ef"} Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.757389 4742 generic.go:334] "Generic (PLEG): container finished" podID="c8576918-5101-4cce-a010-21d17c783f03" containerID="cede2ecf2c9c2aa52ac28b60fafd77482af288cc3563f105d2b4748b4153bf73" exitCode=0 Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.757432 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bcrbj" event={"ID":"c8576918-5101-4cce-a010-21d17c783f03","Type":"ContainerDied","Data":"cede2ecf2c9c2aa52ac28b60fafd77482af288cc3563f105d2b4748b4153bf73"} Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.757458 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bcrbj" event={"ID":"c8576918-5101-4cce-a010-21d17c783f03","Type":"ContainerDied","Data":"6b05280e1097192afba4187eefaf620dacbb87a626151dfbe615b38cc88a4512"} Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.757480 4742 scope.go:117] "RemoveContainer" containerID="cede2ecf2c9c2aa52ac28b60fafd77482af288cc3563f105d2b4748b4153bf73" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.757593 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bcrbj" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.779224 4742 scope.go:117] "RemoveContainer" containerID="b063d869a2dcd0730e936499bda30b0fcb0fad7d68168cc8e8c095874234c97c" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.790692 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bcrbj"] Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.795616 4742 scope.go:117] "RemoveContainer" containerID="10f1c63aa46e5d656dea32fcee1de434cf6b00c29a086177f0dde4e83bc11d87" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.801506 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bcrbj"] Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.814470 4742 scope.go:117] "RemoveContainer" containerID="cede2ecf2c9c2aa52ac28b60fafd77482af288cc3563f105d2b4748b4153bf73" Dec 05 06:08:18 crc kubenswrapper[4742]: E1205 06:08:18.814911 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cede2ecf2c9c2aa52ac28b60fafd77482af288cc3563f105d2b4748b4153bf73\": container with ID starting with cede2ecf2c9c2aa52ac28b60fafd77482af288cc3563f105d2b4748b4153bf73 not found: ID does not exist" containerID="cede2ecf2c9c2aa52ac28b60fafd77482af288cc3563f105d2b4748b4153bf73" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.814959 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cede2ecf2c9c2aa52ac28b60fafd77482af288cc3563f105d2b4748b4153bf73"} err="failed to get container status \"cede2ecf2c9c2aa52ac28b60fafd77482af288cc3563f105d2b4748b4153bf73\": rpc error: code = NotFound desc = could not find container \"cede2ecf2c9c2aa52ac28b60fafd77482af288cc3563f105d2b4748b4153bf73\": container with ID starting with cede2ecf2c9c2aa52ac28b60fafd77482af288cc3563f105d2b4748b4153bf73 not found: ID does not exist" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.815015 4742 scope.go:117] "RemoveContainer" containerID="b063d869a2dcd0730e936499bda30b0fcb0fad7d68168cc8e8c095874234c97c" Dec 05 06:08:18 crc kubenswrapper[4742]: E1205 06:08:18.815339 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b063d869a2dcd0730e936499bda30b0fcb0fad7d68168cc8e8c095874234c97c\": container with ID starting with b063d869a2dcd0730e936499bda30b0fcb0fad7d68168cc8e8c095874234c97c not found: ID does not exist" containerID="b063d869a2dcd0730e936499bda30b0fcb0fad7d68168cc8e8c095874234c97c" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.815386 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b063d869a2dcd0730e936499bda30b0fcb0fad7d68168cc8e8c095874234c97c"} err="failed to get container status \"b063d869a2dcd0730e936499bda30b0fcb0fad7d68168cc8e8c095874234c97c\": rpc error: code = NotFound desc = could not find container \"b063d869a2dcd0730e936499bda30b0fcb0fad7d68168cc8e8c095874234c97c\": container with ID starting with b063d869a2dcd0730e936499bda30b0fcb0fad7d68168cc8e8c095874234c97c not found: ID does not exist" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.815420 4742 scope.go:117] "RemoveContainer" containerID="10f1c63aa46e5d656dea32fcee1de434cf6b00c29a086177f0dde4e83bc11d87" Dec 05 06:08:18 crc kubenswrapper[4742]: E1205 06:08:18.815753 4742 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"10f1c63aa46e5d656dea32fcee1de434cf6b00c29a086177f0dde4e83bc11d87\": container with ID starting with 10f1c63aa46e5d656dea32fcee1de434cf6b00c29a086177f0dde4e83bc11d87 not found: ID does not exist" containerID="10f1c63aa46e5d656dea32fcee1de434cf6b00c29a086177f0dde4e83bc11d87" Dec 05 06:08:18 crc kubenswrapper[4742]: I1205 06:08:18.815777 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10f1c63aa46e5d656dea32fcee1de434cf6b00c29a086177f0dde4e83bc11d87"} err="failed to get container status \"10f1c63aa46e5d656dea32fcee1de434cf6b00c29a086177f0dde4e83bc11d87\": rpc error: code = NotFound desc = could not find container \"10f1c63aa46e5d656dea32fcee1de434cf6b00c29a086177f0dde4e83bc11d87\": container with ID starting with 10f1c63aa46e5d656dea32fcee1de434cf6b00c29a086177f0dde4e83bc11d87 not found: ID does not exist" Dec 05 06:08:20 crc kubenswrapper[4742]: I1205 06:08:20.388219 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8576918-5101-4cce-a010-21d17c783f03" path="/var/lib/kubelet/pods/c8576918-5101-4cce-a010-21d17c783f03/volumes" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.674533 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-gmqkf"] Dec 05 06:08:33 crc kubenswrapper[4742]: E1205 06:08:33.675518 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8576918-5101-4cce-a010-21d17c783f03" containerName="registry-server" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.675569 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8576918-5101-4cce-a010-21d17c783f03" containerName="registry-server" Dec 05 06:08:33 crc kubenswrapper[4742]: E1205 06:08:33.675597 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8576918-5101-4cce-a010-21d17c783f03" containerName="extract-utilities" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.675608 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8576918-5101-4cce-a010-21d17c783f03" containerName="extract-utilities" Dec 05 06:08:33 crc kubenswrapper[4742]: E1205 06:08:33.675627 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8576918-5101-4cce-a010-21d17c783f03" containerName="extract-content" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.675638 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8576918-5101-4cce-a010-21d17c783f03" containerName="extract-content" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.675824 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8576918-5101-4cce-a010-21d17c783f03" containerName="registry-server" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.676761 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gmqkf" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.680037 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-b6znl" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.683728 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-7t6zt"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.685223 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-7t6zt" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.686789 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-r8k2t" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.694425 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-gmqkf"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.703325 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.704287 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.706794 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-k7f9p" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.711973 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-7t6zt"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.721509 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.737393 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.738464 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.741195 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-rtftr" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.750685 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.767040 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2sqlp"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.768400 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2sqlp" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.773817 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-p2zjc" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.799425 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2sqlp"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.801444 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wd9s4\" (UniqueName: \"kubernetes.io/projected/55a3d509-dd87-42fb-be01-6cdd6ffcc70c-kube-api-access-wd9s4\") pod \"designate-operator-controller-manager-697fb699cf-dp5sb\" (UID: \"55a3d509-dd87-42fb-be01-6cdd6ffcc70c\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.801522 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bz9h\" (UniqueName: \"kubernetes.io/projected/491b9b94-2e41-4c0b-8286-6c7c8b460933-kube-api-access-9bz9h\") pod \"barbican-operator-controller-manager-7d9dfd778-gmqkf\" (UID: \"491b9b94-2e41-4c0b-8286-6c7c8b460933\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gmqkf" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.801598 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk2z4\" (UniqueName: \"kubernetes.io/projected/d1b68a24-f581-4b06-a05a-be291467b34b-kube-api-access-sk2z4\") pod \"cinder-operator-controller-manager-6c677c69b-7t6zt\" (UID: \"d1b68a24-f581-4b06-a05a-be291467b34b\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-7t6zt" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.811502 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9nxmx"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.813305 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9nxmx" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.820369 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-75lzb" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.826568 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.827616 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.829654 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.829898 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-mbfpq" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.839583 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-d77rg"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.849034 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-967d97867-d77rg" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.853852 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-cj4sk" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.865533 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.880601 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-d77rg"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.890106 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.901154 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.904864 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bz9h\" (UniqueName: \"kubernetes.io/projected/491b9b94-2e41-4c0b-8286-6c7c8b460933-kube-api-access-9bz9h\") pod \"barbican-operator-controller-manager-7d9dfd778-gmqkf\" (UID: \"491b9b94-2e41-4c0b-8286-6c7c8b460933\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gmqkf" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.904959 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk2z4\" (UniqueName: \"kubernetes.io/projected/d1b68a24-f581-4b06-a05a-be291467b34b-kube-api-access-sk2z4\") pod \"cinder-operator-controller-manager-6c677c69b-7t6zt\" (UID: \"d1b68a24-f581-4b06-a05a-be291467b34b\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-7t6zt" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.905002 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mqk9\" (UniqueName: \"kubernetes.io/projected/6b229432-3291-4696-bc76-eda16eda1a3d-kube-api-access-8mqk9\") pod \"heat-operator-controller-manager-5f64f6f8bb-2sqlp\" (UID: \"6b229432-3291-4696-bc76-eda16eda1a3d\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2sqlp" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.905035 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4qb6\" (UniqueName: \"kubernetes.io/projected/c57dd655-4793-45cd-9e28-ebf4793af611-kube-api-access-l4qb6\") pod \"glance-operator-controller-manager-5697bb5779-2c5fr\" (UID: \"c57dd655-4793-45cd-9e28-ebf4793af611\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.905508 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-jpwh4" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.917150 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wd9s4\" (UniqueName: \"kubernetes.io/projected/55a3d509-dd87-42fb-be01-6cdd6ffcc70c-kube-api-access-wd9s4\") pod \"designate-operator-controller-manager-697fb699cf-dp5sb\" (UID: \"55a3d509-dd87-42fb-be01-6cdd6ffcc70c\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.943197 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9nxmx"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.946938 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk2z4\" (UniqueName: \"kubernetes.io/projected/d1b68a24-f581-4b06-a05a-be291467b34b-kube-api-access-sk2z4\") pod \"cinder-operator-controller-manager-6c677c69b-7t6zt\" (UID: \"d1b68a24-f581-4b06-a05a-be291467b34b\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-7t6zt" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.953071 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-m2g95"] Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.955693 
4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-m2g95" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.958107 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-cshgb" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.958785 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bz9h\" (UniqueName: \"kubernetes.io/projected/491b9b94-2e41-4c0b-8286-6c7c8b460933-kube-api-access-9bz9h\") pod \"barbican-operator-controller-manager-7d9dfd778-gmqkf\" (UID: \"491b9b94-2e41-4c0b-8286-6c7c8b460933\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gmqkf" Dec 05 06:08:33 crc kubenswrapper[4742]: I1205 06:08:33.968763 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wd9s4\" (UniqueName: \"kubernetes.io/projected/55a3d509-dd87-42fb-be01-6cdd6ffcc70c-kube-api-access-wd9s4\") pod \"designate-operator-controller-manager-697fb699cf-dp5sb\" (UID: \"55a3d509-dd87-42fb-be01-6cdd6ffcc70c\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.003551 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.007009 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gmqkf" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.009993 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-m2g95"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.020408 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwjz8\" (UniqueName: \"kubernetes.io/projected/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-kube-api-access-dwjz8\") pod \"infra-operator-controller-manager-758b7cbd9c-4bwlv\" (UID: \"9d7f230e-fc9c-46a0-b31f-2b0772107ebb\") " pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.020461 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j68b9\" (UniqueName: \"kubernetes.io/projected/b9dba9a5-804f-4b60-9e89-0e9dfeba1d44-kube-api-access-j68b9\") pod \"keystone-operator-controller-manager-7765d96ddf-kbvkg\" (UID: \"b9dba9a5-804f-4b60-9e89-0e9dfeba1d44\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.020507 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mqk9\" (UniqueName: \"kubernetes.io/projected/6b229432-3291-4696-bc76-eda16eda1a3d-kube-api-access-8mqk9\") pod \"heat-operator-controller-manager-5f64f6f8bb-2sqlp\" (UID: \"6b229432-3291-4696-bc76-eda16eda1a3d\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2sqlp" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.020559 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4qb6\" (UniqueName: 
\"kubernetes.io/projected/c57dd655-4793-45cd-9e28-ebf4793af611-kube-api-access-l4qb6\") pod \"glance-operator-controller-manager-5697bb5779-2c5fr\" (UID: \"c57dd655-4793-45cd-9e28-ebf4793af611\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.020607 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gxd9\" (UniqueName: \"kubernetes.io/projected/030626ef-00d4-4b99-b629-0b25c15c2c55-kube-api-access-2gxd9\") pod \"horizon-operator-controller-manager-68c6d99b8f-9nxmx\" (UID: \"030626ef-00d4-4b99-b629-0b25c15c2c55\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9nxmx" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.020629 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert\") pod \"infra-operator-controller-manager-758b7cbd9c-4bwlv\" (UID: \"9d7f230e-fc9c-46a0-b31f-2b0772107ebb\") " pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.020678 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5j892\" (UniqueName: \"kubernetes.io/projected/93299aa7-920e-4725-9546-1376e21f8652-kube-api-access-5j892\") pod \"manila-operator-controller-manager-7c79b5df47-m2g95\" (UID: \"93299aa7-920e-4725-9546-1376e21f8652\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-m2g95" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.020696 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whvrk\" (UniqueName: \"kubernetes.io/projected/d8579acb-f382-474b-94ae-86a304ddcaec-kube-api-access-whvrk\") pod \"ironic-operator-controller-manager-967d97867-d77rg\" (UID: \"d8579acb-f382-474b-94ae-86a304ddcaec\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-d77rg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.024845 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-4xvmq"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.025800 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-4xvmq" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.026720 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.027514 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.030013 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-7t6zt" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.043148 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.050399 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-d6tct" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.056499 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mqk9\" (UniqueName: \"kubernetes.io/projected/6b229432-3291-4696-bc76-eda16eda1a3d-kube-api-access-8mqk9\") pod \"heat-operator-controller-manager-5f64f6f8bb-2sqlp\" (UID: \"6b229432-3291-4696-bc76-eda16eda1a3d\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2sqlp" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.056845 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-wbl8b" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.062657 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4qb6\" (UniqueName: \"kubernetes.io/projected/c57dd655-4793-45cd-9e28-ebf4793af611-kube-api-access-l4qb6\") pod \"glance-operator-controller-manager-5697bb5779-2c5fr\" (UID: \"c57dd655-4793-45cd-9e28-ebf4793af611\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.068190 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.083777 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-4xvmq"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.093796 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2sqlp" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.110915 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.120140 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.121199 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.122456 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwjz8\" (UniqueName: \"kubernetes.io/projected/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-kube-api-access-dwjz8\") pod \"infra-operator-controller-manager-758b7cbd9c-4bwlv\" (UID: \"9d7f230e-fc9c-46a0-b31f-2b0772107ebb\") " pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.122485 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j68b9\" (UniqueName: \"kubernetes.io/projected/b9dba9a5-804f-4b60-9e89-0e9dfeba1d44-kube-api-access-j68b9\") pod \"keystone-operator-controller-manager-7765d96ddf-kbvkg\" (UID: \"b9dba9a5-804f-4b60-9e89-0e9dfeba1d44\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.122529 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gxd9\" (UniqueName: \"kubernetes.io/projected/030626ef-00d4-4b99-b629-0b25c15c2c55-kube-api-access-2gxd9\") pod \"horizon-operator-controller-manager-68c6d99b8f-9nxmx\" (UID: \"030626ef-00d4-4b99-b629-0b25c15c2c55\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9nxmx" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.122549 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert\") pod \"infra-operator-controller-manager-758b7cbd9c-4bwlv\" (UID: \"9d7f230e-fc9c-46a0-b31f-2b0772107ebb\") " pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.122582 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5j892\" (UniqueName: \"kubernetes.io/projected/93299aa7-920e-4725-9546-1376e21f8652-kube-api-access-5j892\") pod \"manila-operator-controller-manager-7c79b5df47-m2g95\" (UID: \"93299aa7-920e-4725-9546-1376e21f8652\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-m2g95" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.122602 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whvrk\" (UniqueName: \"kubernetes.io/projected/d8579acb-f382-474b-94ae-86a304ddcaec-kube-api-access-whvrk\") pod \"ironic-operator-controller-manager-967d97867-d77rg\" (UID: \"d8579acb-f382-474b-94ae-86a304ddcaec\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-d77rg" Dec 05 06:08:34 crc kubenswrapper[4742]: E1205 06:08:34.123261 4742 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 06:08:34 crc kubenswrapper[4742]: E1205 06:08:34.123302 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert podName:9d7f230e-fc9c-46a0-b31f-2b0772107ebb nodeName:}" failed. No retries permitted until 2025-12-05 06:08:34.623286884 +0000 UTC m=+990.535421946 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert") pod "infra-operator-controller-manager-758b7cbd9c-4bwlv" (UID: "9d7f230e-fc9c-46a0-b31f-2b0772107ebb") : secret "infra-operator-webhook-server-cert" not found Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.141690 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-6jt4r" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.151771 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gxd9\" (UniqueName: \"kubernetes.io/projected/030626ef-00d4-4b99-b629-0b25c15c2c55-kube-api-access-2gxd9\") pod \"horizon-operator-controller-manager-68c6d99b8f-9nxmx\" (UID: \"030626ef-00d4-4b99-b629-0b25c15c2c55\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9nxmx" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.151832 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-xxqrg"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.153464 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xxqrg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.155255 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwjz8\" (UniqueName: \"kubernetes.io/projected/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-kube-api-access-dwjz8\") pod \"infra-operator-controller-manager-758b7cbd9c-4bwlv\" (UID: \"9d7f230e-fc9c-46a0-b31f-2b0772107ebb\") " pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.155666 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whvrk\" (UniqueName: \"kubernetes.io/projected/d8579acb-f382-474b-94ae-86a304ddcaec-kube-api-access-whvrk\") pod \"ironic-operator-controller-manager-967d97867-d77rg\" (UID: \"d8579acb-f382-474b-94ae-86a304ddcaec\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-d77rg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.160808 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-lqx2x" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.160990 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5j892\" (UniqueName: \"kubernetes.io/projected/93299aa7-920e-4725-9546-1376e21f8652-kube-api-access-5j892\") pod \"manila-operator-controller-manager-7c79b5df47-m2g95\" (UID: \"93299aa7-920e-4725-9546-1376e21f8652\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-m2g95" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.169243 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.172902 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j68b9\" (UniqueName: \"kubernetes.io/projected/b9dba9a5-804f-4b60-9e89-0e9dfeba1d44-kube-api-access-j68b9\") pod \"keystone-operator-controller-manager-7765d96ddf-kbvkg\" (UID: \"b9dba9a5-804f-4b60-9e89-0e9dfeba1d44\") " 
pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.177630 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-xxqrg"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.200238 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.202188 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.205774 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-5vdqj" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.206874 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.206979 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.207299 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-967d97867-d77rg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.208857 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.223638 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5x752\" (UniqueName: \"kubernetes.io/projected/87b6fb22-4077-4dfa-a66c-10ef740b542c-kube-api-access-5x752\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-2qgvf\" (UID: \"87b6fb22-4077-4dfa-a66c-10ef740b542c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.223684 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r9rl\" (UniqueName: \"kubernetes.io/projected/dbf44717-3f12-426c-9133-ef0dd76cea1a-kube-api-access-7r9rl\") pod \"nova-operator-controller-manager-697bc559fc-mq7jm\" (UID: \"dbf44717-3f12-426c-9133-ef0dd76cea1a\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.223759 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xr4b5\" (UniqueName: \"kubernetes.io/projected/3cd456e8-3d67-43bb-9aaf-006acae0a913-kube-api-access-xr4b5\") pod \"mariadb-operator-controller-manager-79c8c4686c-4xvmq\" (UID: \"3cd456e8-3d67-43bb-9aaf-006acae0a913\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-4xvmq" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.223862 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-p8wkz" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.230036 4742 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/placement-operator-controller-manager-78f8948974-8vrd9"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.234182 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8vrd9" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.244961 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-pxktc" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.251160 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.262172 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-8vrd9"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.277093 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.291593 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.292926 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.299429 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-bxdln" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.309476 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.310592 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.314916 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.318763 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-dz6jx" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.324514 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xr4b5\" (UniqueName: \"kubernetes.io/projected/3cd456e8-3d67-43bb-9aaf-006acae0a913-kube-api-access-xr4b5\") pod \"mariadb-operator-controller-manager-79c8c4686c-4xvmq\" (UID: \"3cd456e8-3d67-43bb-9aaf-006acae0a913\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-4xvmq" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.324597 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjd9t\" (UniqueName: \"kubernetes.io/projected/96e75197-2f06-41ef-acca-0752e684ab72-kube-api-access-zjd9t\") pod \"openstack-baremetal-operator-controller-manager-84b575879fngsq9\" (UID: \"96e75197-2f06-41ef-acca-0752e684ab72\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.324621 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7825\" (UniqueName: \"kubernetes.io/projected/7296ffb7-3049-44eb-80d1-850817ee1fac-kube-api-access-n7825\") pod \"ovn-operator-controller-manager-b6456fdb6-ml2jd\" (UID: \"7296ffb7-3049-44eb-80d1-850817ee1fac\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.324643 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fngsq9\" (UID: \"96e75197-2f06-41ef-acca-0752e684ab72\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.324668 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5x752\" (UniqueName: \"kubernetes.io/projected/87b6fb22-4077-4dfa-a66c-10ef740b542c-kube-api-access-5x752\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-2qgvf\" (UID: \"87b6fb22-4077-4dfa-a66c-10ef740b542c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.324695 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r9rl\" (UniqueName: \"kubernetes.io/projected/dbf44717-3f12-426c-9133-ef0dd76cea1a-kube-api-access-7r9rl\") pod \"nova-operator-controller-manager-697bc559fc-mq7jm\" (UID: \"dbf44717-3f12-426c-9133-ef0dd76cea1a\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.324719 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4bzn\" (UniqueName: \"kubernetes.io/projected/47af7008-5488-4a6a-836a-602844f186c9-kube-api-access-f4bzn\") pod \"octavia-operator-controller-manager-998648c74-xxqrg\" (UID: \"47af7008-5488-4a6a-836a-602844f186c9\") " 
pod="openstack-operators/octavia-operator-controller-manager-998648c74-xxqrg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.343406 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-m2g95" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.350121 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xr4b5\" (UniqueName: \"kubernetes.io/projected/3cd456e8-3d67-43bb-9aaf-006acae0a913-kube-api-access-xr4b5\") pod \"mariadb-operator-controller-manager-79c8c4686c-4xvmq\" (UID: \"3cd456e8-3d67-43bb-9aaf-006acae0a913\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-4xvmq" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.350139 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5x752\" (UniqueName: \"kubernetes.io/projected/87b6fb22-4077-4dfa-a66c-10ef740b542c-kube-api-access-5x752\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-2qgvf\" (UID: \"87b6fb22-4077-4dfa-a66c-10ef740b542c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.353385 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r9rl\" (UniqueName: \"kubernetes.io/projected/dbf44717-3f12-426c-9133-ef0dd76cea1a-kube-api-access-7r9rl\") pod \"nova-operator-controller-manager-697bc559fc-mq7jm\" (UID: \"dbf44717-3f12-426c-9133-ef0dd76cea1a\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.359211 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.412980 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-4xvmq" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.415383 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.426014 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4bzn\" (UniqueName: \"kubernetes.io/projected/47af7008-5488-4a6a-836a-602844f186c9-kube-api-access-f4bzn\") pod \"octavia-operator-controller-manager-998648c74-xxqrg\" (UID: \"47af7008-5488-4a6a-836a-602844f186c9\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-xxqrg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.426047 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lknfd\" (UniqueName: \"kubernetes.io/projected/1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f-kube-api-access-lknfd\") pod \"telemetry-operator-controller-manager-58d5ff84df-tqxd6\" (UID: \"1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.426156 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzw78\" (UniqueName: \"kubernetes.io/projected/8b68f95e-f3d8-4e0e-a1a7-f5769e47f3b1-kube-api-access-lzw78\") pod \"placement-operator-controller-manager-78f8948974-8vrd9\" (UID: \"8b68f95e-f3d8-4e0e-a1a7-f5769e47f3b1\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-8vrd9" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.426184 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdgpr\" (UniqueName: \"kubernetes.io/projected/80c34a09-8c71-40d2-828e-b5e416ca4e5d-kube-api-access-qdgpr\") pod \"swift-operator-controller-manager-9d58d64bc-xvrrk\" (UID: \"80c34a09-8c71-40d2-828e-b5e416ca4e5d\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.426209 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjd9t\" (UniqueName: \"kubernetes.io/projected/96e75197-2f06-41ef-acca-0752e684ab72-kube-api-access-zjd9t\") pod \"openstack-baremetal-operator-controller-manager-84b575879fngsq9\" (UID: \"96e75197-2f06-41ef-acca-0752e684ab72\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.426227 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7825\" (UniqueName: \"kubernetes.io/projected/7296ffb7-3049-44eb-80d1-850817ee1fac-kube-api-access-n7825\") pod \"ovn-operator-controller-manager-b6456fdb6-ml2jd\" (UID: \"7296ffb7-3049-44eb-80d1-850817ee1fac\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.426245 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fngsq9\" (UID: \"96e75197-2f06-41ef-acca-0752e684ab72\") " 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9" Dec 05 06:08:34 crc kubenswrapper[4742]: E1205 06:08:34.426347 4742 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 06:08:34 crc kubenswrapper[4742]: E1205 06:08:34.426388 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert podName:96e75197-2f06-41ef-acca-0752e684ab72 nodeName:}" failed. No retries permitted until 2025-12-05 06:08:34.926376059 +0000 UTC m=+990.838511121 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fngsq9" (UID: "96e75197-2f06-41ef-acca-0752e684ab72") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.440735 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9nxmx" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.446766 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.451806 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4bzn\" (UniqueName: \"kubernetes.io/projected/47af7008-5488-4a6a-836a-602844f186c9-kube-api-access-f4bzn\") pod \"octavia-operator-controller-manager-998648c74-xxqrg\" (UID: \"47af7008-5488-4a6a-836a-602844f186c9\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-xxqrg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.489338 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7825\" (UniqueName: \"kubernetes.io/projected/7296ffb7-3049-44eb-80d1-850817ee1fac-kube-api-access-n7825\") pod \"ovn-operator-controller-manager-b6456fdb6-ml2jd\" (UID: \"7296ffb7-3049-44eb-80d1-850817ee1fac\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.497298 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.501778 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjd9t\" (UniqueName: \"kubernetes.io/projected/96e75197-2f06-41ef-acca-0752e684ab72-kube-api-access-zjd9t\") pod \"openstack-baremetal-operator-controller-manager-84b575879fngsq9\" (UID: \"96e75197-2f06-41ef-acca-0752e684ab72\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.511782 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xxqrg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.531158 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzw78\" (UniqueName: \"kubernetes.io/projected/8b68f95e-f3d8-4e0e-a1a7-f5769e47f3b1-kube-api-access-lzw78\") pod \"placement-operator-controller-manager-78f8948974-8vrd9\" (UID: \"8b68f95e-f3d8-4e0e-a1a7-f5769e47f3b1\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-8vrd9" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.554638 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdgpr\" (UniqueName: \"kubernetes.io/projected/80c34a09-8c71-40d2-828e-b5e416ca4e5d-kube-api-access-qdgpr\") pod \"swift-operator-controller-manager-9d58d64bc-xvrrk\" (UID: \"80c34a09-8c71-40d2-828e-b5e416ca4e5d\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.563223 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lknfd\" (UniqueName: \"kubernetes.io/projected/1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f-kube-api-access-lknfd\") pod \"telemetry-operator-controller-manager-58d5ff84df-tqxd6\" (UID: \"1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.567800 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzw78\" (UniqueName: \"kubernetes.io/projected/8b68f95e-f3d8-4e0e-a1a7-f5769e47f3b1-kube-api-access-lzw78\") pod \"placement-operator-controller-manager-78f8948974-8vrd9\" (UID: \"8b68f95e-f3d8-4e0e-a1a7-f5769e47f3b1\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-8vrd9" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.586475 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdgpr\" (UniqueName: \"kubernetes.io/projected/80c34a09-8c71-40d2-828e-b5e416ca4e5d-kube-api-access-qdgpr\") pod \"swift-operator-controller-manager-9d58d64bc-xvrrk\" (UID: \"80c34a09-8c71-40d2-828e-b5e416ca4e5d\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.592667 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lknfd\" (UniqueName: \"kubernetes.io/projected/1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f-kube-api-access-lknfd\") pod \"telemetry-operator-controller-manager-58d5ff84df-tqxd6\" (UID: \"1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.594933 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.597709 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.601218 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-krrll" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.632753 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.633276 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.664444 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert\") pod \"infra-operator-controller-manager-758b7cbd9c-4bwlv\" (UID: \"9d7f230e-fc9c-46a0-b31f-2b0772107ebb\") " pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.664518 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg9tk\" (UniqueName: \"kubernetes.io/projected/961ed339-23e1-4d90-a5b9-f0fcdd73df76-kube-api-access-sg9tk\") pod \"test-operator-controller-manager-5854674fcc-xb5zg\" (UID: \"961ed339-23e1-4d90-a5b9-f0fcdd73df76\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg" Dec 05 06:08:34 crc kubenswrapper[4742]: E1205 06:08:34.664661 4742 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 06:08:34 crc kubenswrapper[4742]: E1205 06:08:34.664704 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert podName:9d7f230e-fc9c-46a0-b31f-2b0772107ebb nodeName:}" failed. No retries permitted until 2025-12-05 06:08:35.664688408 +0000 UTC m=+991.576823470 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert") pod "infra-operator-controller-manager-758b7cbd9c-4bwlv" (UID: "9d7f230e-fc9c-46a0-b31f-2b0772107ebb") : secret "infra-operator-webhook-server-cert" not found Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.674474 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-667bd8d554-rc6hb"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.675626 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-rc6hb" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.679865 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-jr54r" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.685666 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-667bd8d554-rc6hb"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.698156 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8vrd9" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.724159 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.728738 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.731128 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.734361 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.734692 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-jv89w" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.734821 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.738675 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5nmbr"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.739674 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5nmbr" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.740978 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.741538 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-ngg55" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.746385 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5nmbr"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.754807 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-gmqkf"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.765684 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg9tk\" (UniqueName: \"kubernetes.io/projected/961ed339-23e1-4d90-a5b9-f0fcdd73df76-kube-api-access-sg9tk\") pod \"test-operator-controller-manager-5854674fcc-xb5zg\" (UID: \"961ed339-23e1-4d90-a5b9-f0fcdd73df76\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.790491 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg9tk\" (UniqueName: \"kubernetes.io/projected/961ed339-23e1-4d90-a5b9-f0fcdd73df76-kube-api-access-sg9tk\") pod \"test-operator-controller-manager-5854674fcc-xb5zg\" (UID: \"961ed339-23e1-4d90-a5b9-f0fcdd73df76\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg" Dec 05 06:08:34 crc kubenswrapper[4742]: W1205 06:08:34.792948 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b68a24_f581_4b06_a05a_be291467b34b.slice/crio-caa81edec7093640783c7e03d06558b85411142e334396dd11e2976dcebe27d8 WatchSource:0}: Error finding container caa81edec7093640783c7e03d06558b85411142e334396dd11e2976dcebe27d8: Status 404 returned error can't find the container with id caa81edec7093640783c7e03d06558b85411142e334396dd11e2976dcebe27d8 Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.795939 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.811181 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-7t6zt"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.867713 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.867765 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42v98\" (UniqueName: \"kubernetes.io/projected/1b690049-7bae-4629-8183-02c87c0fe640-kube-api-access-42v98\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.867791 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8fhq\" (UniqueName: \"kubernetes.io/projected/a66da454-e3a7-436a-88d0-05bcf3e954eb-kube-api-access-p8fhq\") pod \"rabbitmq-cluster-operator-manager-668c99d594-5nmbr\" (UID: \"a66da454-e3a7-436a-88d0-05bcf3e954eb\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5nmbr" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.867822 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.867850 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wj8l\" (UniqueName: \"kubernetes.io/projected/47ef9cc3-82c3-4874-8fbf-9799bb2a8b4c-kube-api-access-8wj8l\") pod \"watcher-operator-controller-manager-667bd8d554-rc6hb\" (UID: \"47ef9cc3-82c3-4874-8fbf-9799bb2a8b4c\") " pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-rc6hb" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.879824 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-7t6zt" event={"ID":"d1b68a24-f581-4b06-a05a-be291467b34b","Type":"ContainerStarted","Data":"caa81edec7093640783c7e03d06558b85411142e334396dd11e2976dcebe27d8"} Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.882259 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gmqkf" event={"ID":"491b9b94-2e41-4c0b-8286-6c7c8b460933","Type":"ContainerStarted","Data":"e7b2ba2168f56fde6d19ba4e394197898a8407a1a02d5d89ef6ae188b1275e90"} Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.921258 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb"] Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.937515 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.969430 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.969477 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42v98\" (UniqueName: \"kubernetes.io/projected/1b690049-7bae-4629-8183-02c87c0fe640-kube-api-access-42v98\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.969512 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8fhq\" (UniqueName: \"kubernetes.io/projected/a66da454-e3a7-436a-88d0-05bcf3e954eb-kube-api-access-p8fhq\") pod \"rabbitmq-cluster-operator-manager-668c99d594-5nmbr\" (UID: \"a66da454-e3a7-436a-88d0-05bcf3e954eb\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5nmbr" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.969542 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.969573 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wj8l\" (UniqueName: \"kubernetes.io/projected/47ef9cc3-82c3-4874-8fbf-9799bb2a8b4c-kube-api-access-8wj8l\") pod \"watcher-operator-controller-manager-667bd8d554-rc6hb\" (UID: \"47ef9cc3-82c3-4874-8fbf-9799bb2a8b4c\") " pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-rc6hb" Dec 05 06:08:34 crc kubenswrapper[4742]: I1205 06:08:34.969603 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fngsq9\" (UID: \"96e75197-2f06-41ef-acca-0752e684ab72\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9" Dec 05 06:08:34 crc kubenswrapper[4742]: E1205 06:08:34.969787 4742 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 06:08:34 crc kubenswrapper[4742]: E1205 06:08:34.969855 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert podName:96e75197-2f06-41ef-acca-0752e684ab72 nodeName:}" failed. 
No retries permitted until 2025-12-05 06:08:35.969834547 +0000 UTC m=+991.881969609 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fngsq9" (UID: "96e75197-2f06-41ef-acca-0752e684ab72") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 06:08:34 crc kubenswrapper[4742]: E1205 06:08:34.970323 4742 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 06:08:34 crc kubenswrapper[4742]: E1205 06:08:34.970352 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs podName:1b690049-7bae-4629-8183-02c87c0fe640 nodeName:}" failed. No retries permitted until 2025-12-05 06:08:35.47034193 +0000 UTC m=+991.382476992 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs") pod "openstack-operator-controller-manager-6f6696b64-d7l2w" (UID: "1b690049-7bae-4629-8183-02c87c0fe640") : secret "webhook-server-cert" not found Dec 05 06:08:34 crc kubenswrapper[4742]: E1205 06:08:34.970558 4742 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 06:08:34 crc kubenswrapper[4742]: E1205 06:08:34.970644 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs podName:1b690049-7bae-4629-8183-02c87c0fe640 nodeName:}" failed. No retries permitted until 2025-12-05 06:08:35.470614887 +0000 UTC m=+991.382750149 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs") pod "openstack-operator-controller-manager-6f6696b64-d7l2w" (UID: "1b690049-7bae-4629-8183-02c87c0fe640") : secret "metrics-server-cert" not found Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.000995 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8fhq\" (UniqueName: \"kubernetes.io/projected/a66da454-e3a7-436a-88d0-05bcf3e954eb-kube-api-access-p8fhq\") pod \"rabbitmq-cluster-operator-manager-668c99d594-5nmbr\" (UID: \"a66da454-e3a7-436a-88d0-05bcf3e954eb\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5nmbr" Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.002204 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42v98\" (UniqueName: \"kubernetes.io/projected/1b690049-7bae-4629-8183-02c87c0fe640-kube-api-access-42v98\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w" Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.015541 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wj8l\" (UniqueName: \"kubernetes.io/projected/47ef9cc3-82c3-4874-8fbf-9799bb2a8b4c-kube-api-access-8wj8l\") pod \"watcher-operator-controller-manager-667bd8d554-rc6hb\" (UID: \"47ef9cc3-82c3-4874-8fbf-9799bb2a8b4c\") " pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-rc6hb" Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.029162 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-rc6hb" Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.076248 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr"] Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.084771 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5nmbr" Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.102481 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2sqlp"] Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.139483 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-d77rg"] Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.302267 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-m2g95"] Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.323071 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg"] Dec 05 06:08:35 crc kubenswrapper[4742]: W1205 06:08:35.327117 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93299aa7_920e_4725_9546_1376e21f8652.slice/crio-86c5534625ebe67dc98cbcda1cdc4c617d97c210e925199fd2d8204469cb22aa WatchSource:0}: Error finding container 86c5534625ebe67dc98cbcda1cdc4c617d97c210e925199fd2d8204469cb22aa: Status 404 returned error can't find the container with id 86c5534625ebe67dc98cbcda1cdc4c617d97c210e925199fd2d8204469cb22aa Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.486541 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w" Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.486624 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w" Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.486788 4742 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.486831 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs podName:1b690049-7bae-4629-8183-02c87c0fe640 nodeName:}" failed. No retries permitted until 2025-12-05 06:08:36.486817084 +0000 UTC m=+992.398952146 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs") pod "openstack-operator-controller-manager-6f6696b64-d7l2w" (UID: "1b690049-7bae-4629-8183-02c87c0fe640") : secret "webhook-server-cert" not found Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.487802 4742 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.487832 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs podName:1b690049-7bae-4629-8183-02c87c0fe640 nodeName:}" failed. No retries permitted until 2025-12-05 06:08:36.48782454 +0000 UTC m=+992.399959602 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs") pod "openstack-operator-controller-manager-6f6696b64-d7l2w" (UID: "1b690049-7bae-4629-8183-02c87c0fe640") : secret "metrics-server-cert" not found Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.508548 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-4xvmq"] Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.568634 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd"] Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.576419 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9nxmx"] Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.581087 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-xxqrg"] Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.589649 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf"] Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.595201 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm"] Dec 05 06:08:35 crc kubenswrapper[4742]: W1205 06:08:35.625031 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod47af7008_5488_4a6a_836a_602844f186c9.slice/crio-a91a444a1e14fc6e0379c687dd55cc0eaed5ac631cbc62afa2d37c07ff5c3f46 WatchSource:0}: Error finding container a91a444a1e14fc6e0379c687dd55cc0eaed5ac631cbc62afa2d37c07ff5c3f46: Status 404 returned error can't find the container with id a91a444a1e14fc6e0379c687dd55cc0eaed5ac631cbc62afa2d37c07ff5c3f46 Dec 05 06:08:35 crc kubenswrapper[4742]: W1205 06:08:35.625462 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod87b6fb22_4077_4dfa_a66c_10ef740b542c.slice/crio-7a51a1cc8bf9c6863d5779bba2df85904126389091684530ace9cc44dc99e8f8 WatchSource:0}: Error finding container 7a51a1cc8bf9c6863d5779bba2df85904126389091684530ace9cc44dc99e8f8: Status 404 returned error can't find the container with id 7a51a1cc8bf9c6863d5779bba2df85904126389091684530ace9cc44dc99e8f8 Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.686958 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/placement-operator-controller-manager-78f8948974-8vrd9"] Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.688711 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert\") pod \"infra-operator-controller-manager-758b7cbd9c-4bwlv\" (UID: \"9d7f230e-fc9c-46a0-b31f-2b0772107ebb\") " pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv" Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.688860 4742 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.688924 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert podName:9d7f230e-fc9c-46a0-b31f-2b0772107ebb nodeName:}" failed. No retries permitted until 2025-12-05 06:08:37.688904568 +0000 UTC m=+993.601039630 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert") pod "infra-operator-controller-manager-758b7cbd9c-4bwlv" (UID: "9d7f230e-fc9c-46a0-b31f-2b0772107ebb") : secret "infra-operator-webhook-server-cert" not found Dec 05 06:08:35 crc kubenswrapper[4742]: W1205 06:08:35.693416 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8b68f95e_f3d8_4e0e_a1a7_f5769e47f3b1.slice/crio-78d937d71f45f4488b6fb6009716d1c8ecb9f24da144277a65400835d676071f WatchSource:0}: Error finding container 78d937d71f45f4488b6fb6009716d1c8ecb9f24da144277a65400835d676071f: Status 404 returned error can't find the container with id 78d937d71f45f4488b6fb6009716d1c8ecb9f24da144277a65400835d676071f Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.711672 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk"] Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.726998 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qdgpr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-9d58d64bc-xvrrk_openstack-operators(80c34a09-8c71-40d2-828e-b5e416ca4e5d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.729616 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qdgpr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-9d58d64bc-xvrrk_openstack-operators(80c34a09-8c71-40d2-828e-b5e416ca4e5d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.730901 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk" podUID="80c34a09-8c71-40d2-828e-b5e416ca4e5d" Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.826744 4742 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6"] Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.834589 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg"] Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.838910 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lknfd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-58d5ff84df-tqxd6_openstack-operators(1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.840078 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sg9tk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-xb5zg_openstack-operators(961ed339-23e1-4d90-a5b9-f0fcdd73df76): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.842179 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lknfd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-58d5ff84df-tqxd6_openstack-operators(1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.842695 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sg9tk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-xb5zg_openstack-operators(961ed339-23e1-4d90-a5b9-f0fcdd73df76): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.843861 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg" podUID="961ed339-23e1-4d90-a5b9-f0fcdd73df76" Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.843921 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" 
pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" podUID="1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f" Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.896039 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8vrd9" event={"ID":"8b68f95e-f3d8-4e0e-a1a7-f5769e47f3b1","Type":"ContainerStarted","Data":"78d937d71f45f4488b6fb6009716d1c8ecb9f24da144277a65400835d676071f"} Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.899336 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg" event={"ID":"961ed339-23e1-4d90-a5b9-f0fcdd73df76","Type":"ContainerStarted","Data":"3a75b981f3a5c9bcf06c6fb651c60cf1b46398e2b5cbd86190cdbef3a9ef322c"} Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.900973 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-667bd8d554-rc6hb"] Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.905322 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xxqrg" event={"ID":"47af7008-5488-4a6a-836a-602844f186c9","Type":"ContainerStarted","Data":"a91a444a1e14fc6e0379c687dd55cc0eaed5ac631cbc62afa2d37c07ff5c3f46"} Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.909507 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5nmbr"] Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.915862 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg" podUID="961ed339-23e1-4d90-a5b9-f0fcdd73df76" Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.923767 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk" event={"ID":"80c34a09-8c71-40d2-828e-b5e416ca4e5d","Type":"ContainerStarted","Data":"29da682ed9bd154bd433e8cfe12fa0f865e231930b8da769eb0dde3165612803"} Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.929071 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf" event={"ID":"87b6fb22-4077-4dfa-a66c-10ef740b542c","Type":"ContainerStarted","Data":"7a51a1cc8bf9c6863d5779bba2df85904126389091684530ace9cc44dc99e8f8"} Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.929336 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk" podUID="80c34a09-8c71-40d2-828e-b5e416ca4e5d" Dec 05 06:08:35 
crc kubenswrapper[4742]: I1205 06:08:35.930877 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-4xvmq" event={"ID":"3cd456e8-3d67-43bb-9aaf-006acae0a913","Type":"ContainerStarted","Data":"1d7e20b7a0c7ae9636cd2ae99ae4d1060bad2fba4c345589c5740d839bbee1cb"}
Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.942645 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" event={"ID":"1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f","Type":"ContainerStarted","Data":"38824c88754c60fd49fc8ad478b2d30d21444aa723277d1b2ca9929caa4e9964"}
Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.944738 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" podUID="1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f"
Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.968867 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr" event={"ID":"c57dd655-4793-45cd-9e28-ebf4793af611","Type":"ContainerStarted","Data":"b893cf816b75eb3e95ce18acc990dedcb069e056c96d0121f9ca3197705c1a6a"}
Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.973019 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p8fhq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-5nmbr_openstack-operators(a66da454-e3a7-436a-88d0-05bcf3e954eb): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.974216 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5nmbr" podUID="a66da454-e3a7-436a-88d0-05bcf3e954eb"
Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.976813 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd" event={"ID":"7296ffb7-3049-44eb-80d1-850817ee1fac","Type":"ContainerStarted","Data":"55c4c1923b4d98d7028cdb03966d6a362d947520a94f40d8424e44351bd863eb"}
Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.978763 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9nxmx" event={"ID":"030626ef-00d4-4b99-b629-0b25c15c2c55","Type":"ContainerStarted","Data":"04a9e4b21cd7332fc2d9c436be309a328cae1f6102a6d85e4eb97c30eb81657b"}
Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.983570 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-d77rg" event={"ID":"d8579acb-f382-474b-94ae-86a304ddcaec","Type":"ContainerStarted","Data":"0eb54196bdbe0935a6998318319eaa6cbee6bb74656f25564858c7ab8aac3e83"}
Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.997376 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2sqlp" event={"ID":"6b229432-3291-4696-bc76-eda16eda1a3d","Type":"ContainerStarted","Data":"e8219008d6135a41162fa7892f051868e19d1e55a30201add7bcaa1581f10eac"}
Dec 05 06:08:35 crc kubenswrapper[4742]: I1205 06:08:35.997657 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fngsq9\" (UID: \"96e75197-2f06-41ef-acca-0752e684ab72\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9"
Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.998956 4742 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 06:08:35 crc kubenswrapper[4742]: E1205 06:08:35.998995 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert podName:96e75197-2f06-41ef-acca-0752e684ab72 nodeName:}" failed. No retries permitted until 2025-12-05 06:08:37.998982635 +0000 UTC m=+993.911117697 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fngsq9" (UID: "96e75197-2f06-41ef-acca-0752e684ab72") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 06:08:36 crc kubenswrapper[4742]: I1205 06:08:36.000992 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb" event={"ID":"55a3d509-dd87-42fb-be01-6cdd6ffcc70c","Type":"ContainerStarted","Data":"8bc09eb392af368651bb4a70cda1d845e7d41bfdfc12255f36d817bc338c016d"}
Dec 05 06:08:36 crc kubenswrapper[4742]: I1205 06:08:36.004505 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-m2g95" event={"ID":"93299aa7-920e-4725-9546-1376e21f8652","Type":"ContainerStarted","Data":"86c5534625ebe67dc98cbcda1cdc4c617d97c210e925199fd2d8204469cb22aa"}
Dec 05 06:08:36 crc kubenswrapper[4742]: I1205 06:08:36.006339 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm" event={"ID":"dbf44717-3f12-426c-9133-ef0dd76cea1a","Type":"ContainerStarted","Data":"62ba05ff2e31ba364ca70687503a148186aa4896d9e66289c618ef4d0611f22a"}
Dec 05 06:08:36 crc kubenswrapper[4742]: I1205 06:08:36.007530 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg" event={"ID":"b9dba9a5-804f-4b60-9e89-0e9dfeba1d44","Type":"ContainerStarted","Data":"2998d9a559fdbf8ba7f833550d918be8d958af6deef646b53a3dfc817e6c7b5f"}
Dec 05 06:08:36 crc kubenswrapper[4742]: I1205 06:08:36.503706 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"
Dec 05 06:08:36 crc kubenswrapper[4742]: E1205 06:08:36.503925 4742 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 06:08:36 crc kubenswrapper[4742]: E1205 06:08:36.504333 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs podName:1b690049-7bae-4629-8183-02c87c0fe640 nodeName:}" failed. No retries permitted until 2025-12-05 06:08:38.504308219 +0000 UTC m=+994.416443281 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs") pod "openstack-operator-controller-manager-6f6696b64-d7l2w" (UID: "1b690049-7bae-4629-8183-02c87c0fe640") : secret "webhook-server-cert" not found
Dec 05 06:08:36 crc kubenswrapper[4742]: I1205 06:08:36.505530 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"
Dec 05 06:08:36 crc kubenswrapper[4742]: E1205 06:08:36.506874 4742 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 06:08:36 crc kubenswrapper[4742]: E1205 06:08:36.506908 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs podName:1b690049-7bae-4629-8183-02c87c0fe640 nodeName:}" failed. No retries permitted until 2025-12-05 06:08:38.506896386 +0000 UTC m=+994.419031448 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs") pod "openstack-operator-controller-manager-6f6696b64-d7l2w" (UID: "1b690049-7bae-4629-8183-02c87c0fe640") : secret "metrics-server-cert" not found
Dec 05 06:08:37 crc kubenswrapper[4742]: I1205 06:08:37.036846 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-rc6hb" event={"ID":"47ef9cc3-82c3-4874-8fbf-9799bb2a8b4c","Type":"ContainerStarted","Data":"f4f2eb36ff5718c3cfa6551f95a33af35254057b9b61bd201a8ec4dcc04a5d14"}
Dec 05 06:08:37 crc kubenswrapper[4742]: I1205 06:08:37.039506 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5nmbr" event={"ID":"a66da454-e3a7-436a-88d0-05bcf3e954eb","Type":"ContainerStarted","Data":"7e1b5d3c3e45f1fce7403952f8c8d738f99b45d775fec19b1ee99088ff1e0b71"}
Dec 05 06:08:37 crc kubenswrapper[4742]: E1205 06:08:37.042995 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5nmbr" podUID="a66da454-e3a7-436a-88d0-05bcf3e954eb"
Dec 05 06:08:37 crc kubenswrapper[4742]: E1205 06:08:37.043115 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" podUID="1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f"
Dec 05 06:08:37 crc kubenswrapper[4742]: E1205 06:08:37.043256 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk" podUID="80c34a09-8c71-40d2-828e-b5e416ca4e5d"
Dec 05 06:08:37 crc kubenswrapper[4742]: E1205 06:08:37.043511 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg" podUID="961ed339-23e1-4d90-a5b9-f0fcdd73df76"
Dec 05 06:08:37 crc kubenswrapper[4742]: I1205 06:08:37.730656 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert\") pod \"infra-operator-controller-manager-758b7cbd9c-4bwlv\" (UID: \"9d7f230e-fc9c-46a0-b31f-2b0772107ebb\") " pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv"
Dec 05 06:08:37 crc kubenswrapper[4742]: E1205 06:08:37.731101 4742 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 05 06:08:37 crc kubenswrapper[4742]: E1205 06:08:37.731157 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert podName:9d7f230e-fc9c-46a0-b31f-2b0772107ebb nodeName:}" failed. No retries permitted until 2025-12-05 06:08:41.731139969 +0000 UTC m=+997.643275031 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert") pod "infra-operator-controller-manager-758b7cbd9c-4bwlv" (UID: "9d7f230e-fc9c-46a0-b31f-2b0772107ebb") : secret "infra-operator-webhook-server-cert" not found
Dec 05 06:08:38 crc kubenswrapper[4742]: I1205 06:08:38.039398 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fngsq9\" (UID: \"96e75197-2f06-41ef-acca-0752e684ab72\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9"
Dec 05 06:08:38 crc kubenswrapper[4742]: E1205 06:08:38.039524 4742 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 06:08:38 crc kubenswrapper[4742]: E1205 06:08:38.039588 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert podName:96e75197-2f06-41ef-acca-0752e684ab72 nodeName:}" failed. No retries permitted until 2025-12-05 06:08:42.039571133 +0000 UTC m=+997.951706185 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fngsq9" (UID: "96e75197-2f06-41ef-acca-0752e684ab72") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 06:08:38 crc kubenswrapper[4742]: E1205 06:08:38.049601 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5nmbr" podUID="a66da454-e3a7-436a-88d0-05bcf3e954eb"
Dec 05 06:08:38 crc kubenswrapper[4742]: I1205 06:08:38.546353 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"
Dec 05 06:08:38 crc kubenswrapper[4742]: I1205 06:08:38.546681 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"
Dec 05 06:08:38 crc kubenswrapper[4742]: E1205 06:08:38.546516 4742 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 06:08:38 crc kubenswrapper[4742]: E1205 06:08:38.546789 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs podName:1b690049-7bae-4629-8183-02c87c0fe640 nodeName:}" failed. No retries permitted until 2025-12-05 06:08:42.546771046 +0000 UTC m=+998.458906108 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs") pod "openstack-operator-controller-manager-6f6696b64-d7l2w" (UID: "1b690049-7bae-4629-8183-02c87c0fe640") : secret "metrics-server-cert" not found
Dec 05 06:08:38 crc kubenswrapper[4742]: E1205 06:08:38.546799 4742 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 06:08:38 crc kubenswrapper[4742]: E1205 06:08:38.546853 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs podName:1b690049-7bae-4629-8183-02c87c0fe640 nodeName:}" failed. No retries permitted until 2025-12-05 06:08:42.546835568 +0000 UTC m=+998.458970690 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs") pod "openstack-operator-controller-manager-6f6696b64-d7l2w" (UID: "1b690049-7bae-4629-8183-02c87c0fe640") : secret "webhook-server-cert" not found
Dec 05 06:08:41 crc kubenswrapper[4742]: I1205 06:08:41.816106 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert\") pod \"infra-operator-controller-manager-758b7cbd9c-4bwlv\" (UID: \"9d7f230e-fc9c-46a0-b31f-2b0772107ebb\") " pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv"
Dec 05 06:08:41 crc kubenswrapper[4742]: E1205 06:08:41.816297 4742 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 05 06:08:41 crc kubenswrapper[4742]: E1205 06:08:41.816529 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert podName:9d7f230e-fc9c-46a0-b31f-2b0772107ebb nodeName:}" failed. No retries permitted until 2025-12-05 06:08:49.816509263 +0000 UTC m=+1005.728644325 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert") pod "infra-operator-controller-manager-758b7cbd9c-4bwlv" (UID: "9d7f230e-fc9c-46a0-b31f-2b0772107ebb") : secret "infra-operator-webhook-server-cert" not found
Dec 05 06:08:42 crc kubenswrapper[4742]: I1205 06:08:42.120923 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fngsq9\" (UID: \"96e75197-2f06-41ef-acca-0752e684ab72\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9"
Dec 05 06:08:42 crc kubenswrapper[4742]: E1205 06:08:42.121109 4742 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 06:08:42 crc kubenswrapper[4742]: E1205 06:08:42.121191 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert podName:96e75197-2f06-41ef-acca-0752e684ab72 nodeName:}" failed. No retries permitted until 2025-12-05 06:08:50.1211717 +0000 UTC m=+1006.033306762 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert") pod "openstack-baremetal-operator-controller-manager-84b575879fngsq9" (UID: "96e75197-2f06-41ef-acca-0752e684ab72") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 06:08:42 crc kubenswrapper[4742]: I1205 06:08:42.639406 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"
Dec 05 06:08:42 crc kubenswrapper[4742]: I1205 06:08:42.639571 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"
Dec 05 06:08:42 crc kubenswrapper[4742]: E1205 06:08:42.639629 4742 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 06:08:42 crc kubenswrapper[4742]: E1205 06:08:42.639708 4742 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 06:08:42 crc kubenswrapper[4742]: E1205 06:08:42.639726 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs podName:1b690049-7bae-4629-8183-02c87c0fe640 nodeName:}" failed. No retries permitted until 2025-12-05 06:08:50.639702998 +0000 UTC m=+1006.551838140 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs") pod "openstack-operator-controller-manager-6f6696b64-d7l2w" (UID: "1b690049-7bae-4629-8183-02c87c0fe640") : secret "webhook-server-cert" not found
Dec 05 06:08:42 crc kubenswrapper[4742]: E1205 06:08:42.639768 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs podName:1b690049-7bae-4629-8183-02c87c0fe640 nodeName:}" failed. No retries permitted until 2025-12-05 06:08:50.639750059 +0000 UTC m=+1006.551885131 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs") pod "openstack-operator-controller-manager-6f6696b64-d7l2w" (UID: "1b690049-7bae-4629-8183-02c87c0fe640") : secret "metrics-server-cert" not found
Dec 05 06:08:48 crc kubenswrapper[4742]: E1205 06:08:48.208398 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59"
Dec 05 06:08:48 crc kubenswrapper[4742]: E1205 06:08:48.208860 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-n7825,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-ml2jd_openstack-operators(7296ffb7-3049-44eb-80d1-850817ee1fac): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 06:08:48 crc kubenswrapper[4742]: E1205 06:08:48.861078 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:5370dc4a8e776923eec00bb50cbdb2e390e9dde50be26bdc04a216bd2d6b5027"
Dec 05 06:08:48 crc kubenswrapper[4742]: E1205 06:08:48.861548 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:5370dc4a8e776923eec00bb50cbdb2e390e9dde50be26bdc04a216bd2d6b5027,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-l4qb6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-5697bb5779-2c5fr_openstack-operators(c57dd655-4793-45cd-9e28-ebf4793af611): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 06:08:49 crc kubenswrapper[4742]: E1205 06:08:49.530133 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:900050d3501c0785b227db34b89883efe68247816e5c7427cacb74f8aa10605a"
Dec 05 06:08:49 crc kubenswrapper[4742]: E1205 06:08:49.530302 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:900050d3501c0785b227db34b89883efe68247816e5c7427cacb74f8aa10605a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wd9s4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-697fb699cf-dp5sb_openstack-operators(55a3d509-dd87-42fb-be01-6cdd6ffcc70c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 06:08:49 crc kubenswrapper[4742]: I1205 06:08:49.852024 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert\") pod \"infra-operator-controller-manager-758b7cbd9c-4bwlv\" (UID: \"9d7f230e-fc9c-46a0-b31f-2b0772107ebb\") " pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv"
Dec 05 06:08:49 crc kubenswrapper[4742]: I1205 06:08:49.858769 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9d7f230e-fc9c-46a0-b31f-2b0772107ebb-cert\") pod \"infra-operator-controller-manager-758b7cbd9c-4bwlv\" (UID: \"9d7f230e-fc9c-46a0-b31f-2b0772107ebb\") " pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv"
Dec 05 06:08:50 crc kubenswrapper[4742]: I1205 06:08:50.084194 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv"
Dec 05 06:08:50 crc kubenswrapper[4742]: I1205 06:08:50.156126 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fngsq9\" (UID: \"96e75197-2f06-41ef-acca-0752e684ab72\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9"
Dec 05 06:08:50 crc kubenswrapper[4742]: I1205 06:08:50.177781 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/96e75197-2f06-41ef-acca-0752e684ab72-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879fngsq9\" (UID: \"96e75197-2f06-41ef-acca-0752e684ab72\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9"
Dec 05 06:08:50 crc kubenswrapper[4742]: E1205 06:08:50.387554 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557"
Dec 05 06:08:50 crc kubenswrapper[4742]: E1205 06:08:50.387716 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5x752,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-2qgvf_openstack-operators(87b6fb22-4077-4dfa-a66c-10ef740b542c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 06:08:50 crc kubenswrapper[4742]: I1205 06:08:50.467573 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9"
Dec 05 06:08:50 crc kubenswrapper[4742]: I1205 06:08:50.663236 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"
Dec 05 06:08:50 crc kubenswrapper[4742]: I1205 06:08:50.663326 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"
Dec 05 06:08:50 crc kubenswrapper[4742]: I1205 06:08:50.666640 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-metrics-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"
Dec 05 06:08:50 crc kubenswrapper[4742]: I1205 06:08:50.671721 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1b690049-7bae-4629-8183-02c87c0fe640-webhook-certs\") pod \"openstack-operator-controller-manager-6f6696b64-d7l2w\" (UID: \"1b690049-7bae-4629-8183-02c87c0fe640\") " pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"
Dec 05 06:08:50 crc kubenswrapper[4742]: I1205 06:08:50.967286 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"
Dec 05 06:08:57 crc kubenswrapper[4742]: E1205 06:08:57.424382 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670"
Dec 05 06:08:57 crc kubenswrapper[4742]: E1205 06:08:57.424945 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7r9rl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-mq7jm_openstack-operators(dbf44717-3f12-426c-9133-ef0dd76cea1a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 06:08:57 crc kubenswrapper[4742]: E1205 06:08:57.437648 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7"
Dec 05 06:08:57 crc kubenswrapper[4742]: E1205 06:08:57.437786 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-j68b9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-kbvkg_openstack-operators(b9dba9a5-804f-4b60-9e89-0e9dfeba1d44): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 06:09:01 crc kubenswrapper[4742]: I1205 06:09:01.516000 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv"]
Dec 05 06:09:01 crc kubenswrapper[4742]: I1205 06:09:01.523170 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9"]
Dec 05 06:09:01 crc kubenswrapper[4742]: I1205 06:09:01.647989 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"]
Dec 05 06:09:01 crc kubenswrapper[4742]: W1205 06:09:01.880981 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96e75197_2f06_41ef_acca_0752e684ab72.slice/crio-c963e7f8dec2e59ffb1336f21c5884f1392e23d9712a4cef5249cda7b71cfaee WatchSource:0}: Error finding container c963e7f8dec2e59ffb1336f21c5884f1392e23d9712a4cef5249cda7b71cfaee: Status 404 returned error can't find the container with id c963e7f8dec2e59ffb1336f21c5884f1392e23d9712a4cef5249cda7b71cfaee
Dec 05 06:09:01 crc kubenswrapper[4742]: W1205 06:09:01.882877 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d7f230e_fc9c_46a0_b31f_2b0772107ebb.slice/crio-d3d802b005a284be6658ceca9d0a0ec4f65763ab37ee360f6f47a657c670a1a6 WatchSource:0}: Error finding container d3d802b005a284be6658ceca9d0a0ec4f65763ab37ee360f6f47a657c670a1a6: Status 404 returned error can't find the container with id d3d802b005a284be6658ceca9d0a0ec4f65763ab37ee360f6f47a657c670a1a6
Dec 05 06:09:02 crc kubenswrapper[4742]: E1205 06:09:02.201343 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lknfd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-58d5ff84df-tqxd6_openstack-operators(1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 06:09:02 crc kubenswrapper[4742]: E1205 06:09:02.203526 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" podUID="1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f"
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.255858 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9" event={"ID":"96e75197-2f06-41ef-acca-0752e684ab72","Type":"ContainerStarted","Data":"c963e7f8dec2e59ffb1336f21c5884f1392e23d9712a4cef5249cda7b71cfaee"}
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.262792 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9nxmx" event={"ID":"030626ef-00d4-4b99-b629-0b25c15c2c55","Type":"ContainerStarted","Data":"70e33e11dab028ead9ab6f10f47c352bfe2e7e9eeda2eed66c152fcf6d78b610"}
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.268915 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w" event={"ID":"1b690049-7bae-4629-8183-02c87c0fe640","Type":"ContainerStarted","Data":"dd6645b721c707b68bfd774b70c55598c4fd0a00b608f02191315d6d857dd100"}
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.270332 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-d77rg" event={"ID":"d8579acb-f382-474b-94ae-86a304ddcaec","Type":"ContainerStarted","Data":"7e4ed9aba14ca887b1e983d5da5d7890a90b18525fabca06c8736ae1666d3448"}
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.275169 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" event={"ID":"1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f","Type":"ContainerStarted","Data":"c4d70bea2366f070eb4a5e22fdc13a1b93ab72ee530ba74c7632db8f9f417c38"}
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.275865 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6"
Dec 05 06:09:02 crc kubenswrapper[4742]: E1205 06:09:02.277375 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" podUID="1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f"
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.282680 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xxqrg" event={"ID":"47af7008-5488-4a6a-836a-602844f186c9","Type":"ContainerStarted","Data":"b735221e55389accbca0518167e0b99b3cc38a8f0d3ff432746da4477650d6a8"}
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.288522 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv" event={"ID":"9d7f230e-fc9c-46a0-b31f-2b0772107ebb","Type":"ContainerStarted","Data":"d3d802b005a284be6658ceca9d0a0ec4f65763ab37ee360f6f47a657c670a1a6"}
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.293376 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gmqkf" event={"ID":"491b9b94-2e41-4c0b-8286-6c7c8b460933","Type":"ContainerStarted","Data":"a6c8559881aae05584847bf923d8c67d96a85422981128f48733e1e19410e637"}
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.295855 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-rc6hb" event={"ID":"47ef9cc3-82c3-4874-8fbf-9799bb2a8b4c","Type":"ContainerStarted","Data":"ab1fe99078c9a5849e6c6e3d9bc2e5eb93b3080f944ad753ed9fe63e8bad9ca9"}
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.300641 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8vrd9" event={"ID":"8b68f95e-f3d8-4e0e-a1a7-f5769e47f3b1","Type":"ContainerStarted","Data":"fabdff6d3bd11a7ceabc736fdd0b5ca9e843640bdc28948569b7e6f26816eb7a"}
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.305922 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2sqlp" event={"ID":"6b229432-3291-4696-bc76-eda16eda1a3d","Type":"ContainerStarted","Data":"aff4783f98cbc2347c90d710eea53a37dfcd7a5b0f7f51b8faa169ca9a2b0a3a"}
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.312029 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-7t6zt" event={"ID":"d1b68a24-f581-4b06-a05a-be291467b34b","Type":"ContainerStarted","Data":"164b1f3d73077780932b67b4471254912e34121d33e678c4c0be100bbe66b2d8"}
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.313107 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-4xvmq" event={"ID":"3cd456e8-3d67-43bb-9aaf-006acae0a913","Type":"ContainerStarted","Data":"6096dc3832b0f589a4212f26f5c1058f91c66e6f1c4e8248a40dfa5c50c38f9d"}
Dec 05 06:09:02 crc kubenswrapper[4742]: I1205 06:09:02.318475 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-m2g95" event={"ID":"93299aa7-920e-4725-9546-1376e21f8652","Type":"ContainerStarted","Data":"12b1d262c2725bc326ff4b4ac80cfd37acba6957da88d4993b7464e80a8cebec"}
Dec 05 06:09:03 crc kubenswrapper[4742]: I1205 06:09:03.328852 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg" event={"ID":"961ed339-23e1-4d90-a5b9-f0fcdd73df76","Type":"ContainerStarted","Data":"36461837ceb21c7712f4cfb7d24a086d38672aa44b38ca0e122b781761ce4725"}
Dec 05 06:09:03 crc kubenswrapper[4742]: I1205 06:09:03.333676 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk" event={"ID":"80c34a09-8c71-40d2-828e-b5e416ca4e5d","Type":"ContainerStarted","Data":"4e6fbf746f65d20ccbc40843dc73a7720f601eb7a9c569c90999c05f5fee0e17"}
Dec 05 06:09:03 crc kubenswrapper[4742]: I1205 06:09:03.334758 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w" event={"ID":"1b690049-7bae-4629-8183-02c87c0fe640","Type":"ContainerStarted","Data":"f6581e2e3da76d6ed91548f8fad6ad21a5935b386d221f195cac5650a059d223"}
Dec 05 06:09:03 crc kubenswrapper[4742]: I1205 06:09:03.335082 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w"
Dec 05 06:09:03 crc kubenswrapper[4742]: E1205 06:09:03.336187 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" podUID="1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f"
Dec 05 06:09:03 crc kubenswrapper[4742]: I1205 06:09:03.363856 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w" podStartSLOduration=29.363841954 podStartE2EDuration="29.363841954s" podCreationTimestamp="2025-12-05 06:08:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:09:03.362101208 +0000 UTC m=+1019.274236270" watchObservedRunningTime="2025-12-05 06:09:03.363841954 +0000 UTC m=+1019.275977016"
Dec 05 06:09:05 crc kubenswrapper[4742]: I1205 06:09:05.354328 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5nmbr" event={"ID":"a66da454-e3a7-436a-88d0-05bcf3e954eb","Type":"ContainerStarted","Data":"6721b364d9cefbc2f7b60fc00140fdd67f7c4d1325fc602adf9cbe36b65a7bf6"}
Dec 05 06:09:05 crc kubenswrapper[4742]: I1205 06:09:05.371678 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-5nmbr" podStartSLOduration=6.011785789 podStartE2EDuration="31.371652267s" podCreationTimestamp="2025-12-05 06:08:34 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.972870625 +0000 UTC m=+991.885005687" lastFinishedPulling="2025-12-05 06:09:01.332737103 +0000 UTC m=+1017.244872165" observedRunningTime="2025-12-05 06:09:05.370472866 +0000 UTC m=+1021.282608008" watchObservedRunningTime="2025-12-05 06:09:05.371652267 +0000 UTC m=+1021.283787359"
Dec 05 06:09:06 crc kubenswrapper[4742]: E1205 06:09:06.480480 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd" podUID="7296ffb7-3049-44eb-80d1-850817ee1fac"
Dec 05 06:09:06 crc kubenswrapper[4742]: E1205 06:09:06.679547 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm" podUID="dbf44717-3f12-426c-9133-ef0dd76cea1a"
Dec 05 06:09:06 crc kubenswrapper[4742]: E1205 06:09:06.685449 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg" podUID="b9dba9a5-804f-4b60-9e89-0e9dfeba1d44"
Dec 05 06:09:06 crc kubenswrapper[4742]: E1205 06:09:06.878705 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb" podUID="55a3d509-dd87-42fb-be01-6cdd6ffcc70c"
Dec 05 06:09:07 crc kubenswrapper[4742]: E1205 06:09:07.019477 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr" podUID="c57dd655-4793-45cd-9e28-ebf4793af611"
Dec 05 06:09:07 crc kubenswrapper[4742]: E1205 06:09:07.096180 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf" podUID="87b6fb22-4077-4dfa-a66c-10ef740b542c"
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.377974 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-4xvmq" event={"ID":"3cd456e8-3d67-43bb-9aaf-006acae0a913","Type":"ContainerStarted","Data":"d8682056283d84897f47cd873b76873886abcc89b659b104de69cc024c61c84d"}
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.378200 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-4xvmq"
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.379463 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gmqkf" event={"ID":"491b9b94-2e41-4c0b-8286-6c7c8b460933","Type":"ContainerStarted","Data":"b015889c6e211c9dde5300d57aff0dcdca7d6fadd0e9253157f4563d8eb3fe8a"}
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.379957 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gmqkf"
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.380146 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-4xvmq"
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.380941 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gmqkf"
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.382388 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm" event={"ID":"dbf44717-3f12-426c-9133-ef0dd76cea1a","Type":"ContainerStarted","Data":"b9086e12c20643856e7a8fd93b2b8cff048cd55b0d1015e16ec11ab0740ac06c"}
Dec 05 06:09:07 crc kubenswrapper[4742]: E1205 06:09:07.383377 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm" podUID="dbf44717-3f12-426c-9133-ef0dd76cea1a"
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.384244 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xxqrg" event={"ID":"47af7008-5488-4a6a-836a-602844f186c9","Type":"ContainerStarted","Data":"995e4d2a6799bbc28fac8777e75ff98bdc945de818d1926f806b85f57488333c"}
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.384370 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xxqrg"
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.385902 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8vrd9" event={"ID":"8b68f95e-f3d8-4e0e-a1a7-f5769e47f3b1","Type":"ContainerStarted","Data":"eeda25304fc2bac03c361443d6a94d3481f745bf4381c58bc47a3021d6a2a73d"}
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.386026 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8vrd9"
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.387844 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xxqrg"
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.388368 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-rc6hb" event={"ID":"47ef9cc3-82c3-4874-8fbf-9799bb2a8b4c","Type":"ContainerStarted","Data":"8d61e61ec5e5202afb7fa661836e682352c279a8d729e004f7bf6b5e06865238"}
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.388589 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-rc6hb"
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.389548 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8vrd9"
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.390053 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv" event={"ID":"9d7f230e-fc9c-46a0-b31f-2b0772107ebb","Type":"ContainerStarted","Data":"2cf41eda241e163ed0e4a7866936c057db37536bcd366e1746e2addd79a63b17"}
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.390117 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv" event={"ID":"9d7f230e-fc9c-46a0-b31f-2b0772107ebb","Type":"ContainerStarted","Data":"1087fb7c82635c48d5c974b55e461be33cd65168375bc9950c68ccb4e2cd4d86"}
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.390161 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv"
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.391117 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-rc6hb"
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.391369 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb" event={"ID":"55a3d509-dd87-42fb-be01-6cdd6ffcc70c","Type":"ContainerStarted","Data":"641075cde89ca8b65f339c9a9b9e00a36232a8517fd8f54dd0ca1ff293aebd7b"}
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.392684 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd" event={"ID":"7296ffb7-3049-44eb-80d1-850817ee1fac","Type":"ContainerStarted","Data":"1d10a0df390f80dc1c921b52fe466e92782c1fda8d0ddc0338a1fd670f878032"}
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.395186 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9" event={"ID":"96e75197-2f06-41ef-acca-0752e684ab72","Type":"ContainerStarted","Data":"a39a5149967e135169f5f9969f8efc159382c742c7b78d5581b4cf0c133d2458"}
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.395421 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9"
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.395433 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9" event={"ID":"96e75197-2f06-41ef-acca-0752e684ab72","Type":"ContainerStarted","Data":"6f3926d62a3dae80ce15ac412ae9afe5254f31cf31f76b0fcb7504b732c13085"}
Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.397543 4742
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9nxmx" event={"ID":"030626ef-00d4-4b99-b629-0b25c15c2c55","Type":"ContainerStarted","Data":"c77cd05fb7f28ce451e40fb90b26cc123125807744c78c553758c7ed5735f293"} Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.397943 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9nxmx" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.399140 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9nxmx" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.400869 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf" event={"ID":"87b6fb22-4077-4dfa-a66c-10ef740b542c","Type":"ContainerStarted","Data":"fde527c1ff97081e2e611100c41784414c3d270d2365281096243afb4c2fdfa5"} Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.403420 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-7t6zt" event={"ID":"d1b68a24-f581-4b06-a05a-be291467b34b","Type":"ContainerStarted","Data":"9c9dfe93390303ced56304d750d0a506e51cd8861ff19d4304a5ac39d15da1ad"} Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.404123 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-7t6zt" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.406647 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-7t6zt" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.408532 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-4xvmq" podStartSLOduration=3.842540021 podStartE2EDuration="34.408514718s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.539954648 +0000 UTC m=+991.452089710" lastFinishedPulling="2025-12-05 06:09:06.105929345 +0000 UTC m=+1022.018064407" observedRunningTime="2025-12-05 06:09:07.404983536 +0000 UTC m=+1023.317118598" watchObservedRunningTime="2025-12-05 06:09:07.408514718 +0000 UTC m=+1023.320649780" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.413011 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr" event={"ID":"c57dd655-4793-45cd-9e28-ebf4793af611","Type":"ContainerStarted","Data":"e4d55a421dcfde0a838aa2e88aefd4e63fed28242ca5a01e79d56bbe02b632dc"} Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.433308 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg" event={"ID":"961ed339-23e1-4d90-a5b9-f0fcdd73df76","Type":"ContainerStarted","Data":"a830b7bce1dba363cab2fc5c57bba4d53e19838bafb532b9200f979dd00ae78c"} Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.433498 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.440271 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.445364 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg" event={"ID":"b9dba9a5-804f-4b60-9e89-0e9dfeba1d44","Type":"ContainerStarted","Data":"216d93727e6af3ef2247a504bf37ccf0b46d5d760c6d75228b518cb811d1a330"} Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.449932 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9" podStartSLOduration=30.24543964 podStartE2EDuration="34.449909977s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:09:01.883375767 +0000 UTC m=+1017.795510829" lastFinishedPulling="2025-12-05 06:09:06.087846104 +0000 UTC m=+1021.999981166" observedRunningTime="2025-12-05 06:09:07.437322159 +0000 UTC m=+1023.349457221" watchObservedRunningTime="2025-12-05 06:09:07.449909977 +0000 UTC m=+1023.362045029" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.464391 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk" event={"ID":"80c34a09-8c71-40d2-828e-b5e416ca4e5d","Type":"ContainerStarted","Data":"9595cb535f0cb5b32ad0b48a1e8b9f9c9285db0f7ae616382d58b8bc7e3bca90"} Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.466459 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.470351 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.484317 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-d77rg" event={"ID":"d8579acb-f382-474b-94ae-86a304ddcaec","Type":"ContainerStarted","Data":"8a92593017a80667712f7ea989726dffceb06949cffe4655d2c6ee2763824f32"} Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.486044 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-967d97867-d77rg" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.487867 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-967d97867-d77rg" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.514465 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2sqlp" event={"ID":"6b229432-3291-4696-bc76-eda16eda1a3d","Type":"ContainerStarted","Data":"3c81aeb2b5a15e6bbf63c3a81a2aff146efb77374a05678dc26b02e9d7c22b9a"} Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.515411 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2sqlp" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.518586 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2sqlp" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.530815 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-m2g95" event={"ID":"93299aa7-920e-4725-9546-1376e21f8652","Type":"ContainerStarted","Data":"7bcd428aee9272e0670fd736b6746292343f2740a9585e296f0c25c5ab34ab21"} Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.531917 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-m2g95" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.533529 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-m2g95" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.540489 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-8vrd9" podStartSLOduration=4.077568223 podStartE2EDuration="34.540475846s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.696024663 +0000 UTC m=+991.608159725" lastFinishedPulling="2025-12-05 06:09:06.158932296 +0000 UTC m=+1022.071067348" observedRunningTime="2025-12-05 06:09:07.538227327 +0000 UTC m=+1023.450362389" watchObservedRunningTime="2025-12-05 06:09:07.540475846 +0000 UTC m=+1023.452610908" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.541103 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv" podStartSLOduration=30.344974962 podStartE2EDuration="34.541099242s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:09:01.886314783 +0000 UTC m=+1017.798449845" lastFinishedPulling="2025-12-05 06:09:06.082439063 +0000 UTC m=+1021.994574125" observedRunningTime="2025-12-05 06:09:07.511353387 +0000 UTC m=+1023.423488449" watchObservedRunningTime="2025-12-05 06:09:07.541099242 +0000 UTC m=+1023.453234304" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.668055 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-xxqrg" podStartSLOduration=4.14923928 podStartE2EDuration="34.668033959s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.627493168 +0000 UTC m=+991.539628230" lastFinishedPulling="2025-12-05 06:09:06.146287847 +0000 UTC m=+1022.058422909" observedRunningTime="2025-12-05 06:09:07.666202811 +0000 UTC m=+1023.578337903" watchObservedRunningTime="2025-12-05 06:09:07.668033959 +0000 UTC m=+1023.580169021" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.731014 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-7t6zt" podStartSLOduration=3.368461652 podStartE2EDuration="34.730997039s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:34.79644311 +0000 UTC m=+990.708578172" lastFinishedPulling="2025-12-05 06:09:06.158978497 +0000 UTC m=+1022.071113559" observedRunningTime="2025-12-05 06:09:07.730610349 +0000 UTC m=+1023.642745411" watchObservedRunningTime="2025-12-05 06:09:07.730997039 +0000 UTC m=+1023.643132101" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.753413 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-gmqkf" podStartSLOduration=3.381058699 
podStartE2EDuration="34.753397842s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:34.673104777 +0000 UTC m=+990.585239839" lastFinishedPulling="2025-12-05 06:09:06.04544391 +0000 UTC m=+1021.957578982" observedRunningTime="2025-12-05 06:09:07.75099511 +0000 UTC m=+1023.663130182" watchObservedRunningTime="2025-12-05 06:09:07.753397842 +0000 UTC m=+1023.665532904" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.786500 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9nxmx" podStartSLOduration=4.301977578 podStartE2EDuration="34.786482604s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.597882726 +0000 UTC m=+991.510017788" lastFinishedPulling="2025-12-05 06:09:06.082387752 +0000 UTC m=+1021.994522814" observedRunningTime="2025-12-05 06:09:07.781991077 +0000 UTC m=+1023.694126139" watchObservedRunningTime="2025-12-05 06:09:07.786482604 +0000 UTC m=+1023.698617666" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.807600 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-667bd8d554-rc6hb" podStartSLOduration=3.5085609509999998 podStartE2EDuration="33.807582234s" podCreationTimestamp="2025-12-05 06:08:34 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.92276332 +0000 UTC m=+991.834898382" lastFinishedPulling="2025-12-05 06:09:06.221784603 +0000 UTC m=+1022.133919665" observedRunningTime="2025-12-05 06:09:07.801733532 +0000 UTC m=+1023.713868594" watchObservedRunningTime="2025-12-05 06:09:07.807582234 +0000 UTC m=+1023.719717296" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.821228 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-967d97867-d77rg" podStartSLOduration=3.969062006 podStartE2EDuration="34.821198948s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.262569302 +0000 UTC m=+991.174704364" lastFinishedPulling="2025-12-05 06:09:06.114706244 +0000 UTC m=+1022.026841306" observedRunningTime="2025-12-05 06:09:07.820630173 +0000 UTC m=+1023.732765235" watchObservedRunningTime="2025-12-05 06:09:07.821198948 +0000 UTC m=+1023.733334010" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.845704 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-xvrrk" podStartSLOduration=3.44944038 podStartE2EDuration="33.845688786s" podCreationTimestamp="2025-12-05 06:08:34 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.726759004 +0000 UTC m=+991.638894066" lastFinishedPulling="2025-12-05 06:09:06.12300741 +0000 UTC m=+1022.035142472" observedRunningTime="2025-12-05 06:09:07.841668881 +0000 UTC m=+1023.753803943" watchObservedRunningTime="2025-12-05 06:09:07.845688786 +0000 UTC m=+1023.757823848" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.920464 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-2sqlp" podStartSLOduration=4.04291299 podStartE2EDuration="34.920444013s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.262241984 +0000 UTC m=+991.174377046" lastFinishedPulling="2025-12-05 06:09:06.139773007 +0000 UTC m=+1022.051908069" 
observedRunningTime="2025-12-05 06:09:07.914491318 +0000 UTC m=+1023.826626380" watchObservedRunningTime="2025-12-05 06:09:07.920444013 +0000 UTC m=+1023.832579075" Dec 05 06:09:07 crc kubenswrapper[4742]: I1205 06:09:07.947579 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-m2g95" podStartSLOduration=4.231912974 podStartE2EDuration="34.947560259s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.329792324 +0000 UTC m=+991.241927386" lastFinishedPulling="2025-12-05 06:09:06.045439599 +0000 UTC m=+1021.957574671" observedRunningTime="2025-12-05 06:09:07.946325807 +0000 UTC m=+1023.858460879" watchObservedRunningTime="2025-12-05 06:09:07.947560259 +0000 UTC m=+1023.859695322" Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.002391 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-xb5zg" podStartSLOduration=3.730428099 podStartE2EDuration="34.002362787s" podCreationTimestamp="2025-12-05 06:08:34 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.839989833 +0000 UTC m=+991.752124895" lastFinishedPulling="2025-12-05 06:09:06.111924511 +0000 UTC m=+1022.024059583" observedRunningTime="2025-12-05 06:09:07.99748156 +0000 UTC m=+1023.909616622" watchObservedRunningTime="2025-12-05 06:09:08.002362787 +0000 UTC m=+1023.914497839" Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.559199 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf" event={"ID":"87b6fb22-4077-4dfa-a66c-10ef740b542c","Type":"ContainerStarted","Data":"b35fb33432be4db63c510692032300ee52dc804a73038b05278baceed7594a90"} Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.560770 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf" Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.566332 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb" event={"ID":"55a3d509-dd87-42fb-be01-6cdd6ffcc70c","Type":"ContainerStarted","Data":"0f31aa481679a30b9df1e34eaf8c316c44ab69c7b3a37585581ccef05f144ac3"} Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.566969 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb" Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.576370 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr" event={"ID":"c57dd655-4793-45cd-9e28-ebf4793af611","Type":"ContainerStarted","Data":"192d8a7e1441e65f12710848aa6dd0abbd26d4dac09a17881ab0a82f7a075e54"} Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.577027 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr" Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.592138 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf" podStartSLOduration=3.406612945 podStartE2EDuration="35.592120271s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.62795392 +0000 
UTC m=+991.540088972" lastFinishedPulling="2025-12-05 06:09:07.813461236 +0000 UTC m=+1023.725596298" observedRunningTime="2025-12-05 06:09:08.588599999 +0000 UTC m=+1024.500735071" watchObservedRunningTime="2025-12-05 06:09:08.592120271 +0000 UTC m=+1024.504255333" Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.597776 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd" event={"ID":"7296ffb7-3049-44eb-80d1-850817ee1fac","Type":"ContainerStarted","Data":"8f3e272f629ec3ce5653caba96f2eacacd26281913e8bd88b67c3e0806dd728c"} Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.597919 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd" Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.600736 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg" event={"ID":"b9dba9a5-804f-4b60-9e89-0e9dfeba1d44","Type":"ContainerStarted","Data":"a1c821db0a52751ee1b74da3d97a5aa93f090f591cace83b1098f6884e73825b"} Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.623790 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb" podStartSLOduration=2.555971535 podStartE2EDuration="35.623768015s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:34.970669258 +0000 UTC m=+990.882804320" lastFinishedPulling="2025-12-05 06:09:08.038465748 +0000 UTC m=+1023.950600800" observedRunningTime="2025-12-05 06:09:08.613927419 +0000 UTC m=+1024.526062481" watchObservedRunningTime="2025-12-05 06:09:08.623768015 +0000 UTC m=+1024.535903087" Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.634782 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr" podStartSLOduration=2.994049397 podStartE2EDuration="35.634768321s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.174150839 +0000 UTC m=+991.086285901" lastFinishedPulling="2025-12-05 06:09:07.814869763 +0000 UTC m=+1023.727004825" observedRunningTime="2025-12-05 06:09:08.632434991 +0000 UTC m=+1024.544570063" watchObservedRunningTime="2025-12-05 06:09:08.634768321 +0000 UTC m=+1024.546903383" Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.655557 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd" podStartSLOduration=3.1660853700000002 podStartE2EDuration="35.655535143s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.576337756 +0000 UTC m=+991.488472818" lastFinishedPulling="2025-12-05 06:09:08.065787529 +0000 UTC m=+1023.977922591" observedRunningTime="2025-12-05 06:09:08.651740024 +0000 UTC m=+1024.563875086" watchObservedRunningTime="2025-12-05 06:09:08.655535143 +0000 UTC m=+1024.567670205" Dec 05 06:09:08 crc kubenswrapper[4742]: I1205 06:09:08.687971 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg" podStartSLOduration=3.072498572 podStartE2EDuration="35.687953867s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.383198265 +0000 UTC 
m=+991.295333327" lastFinishedPulling="2025-12-05 06:09:07.99865356 +0000 UTC m=+1023.910788622" observedRunningTime="2025-12-05 06:09:08.684154638 +0000 UTC m=+1024.596289700" watchObservedRunningTime="2025-12-05 06:09:08.687953867 +0000 UTC m=+1024.600088919" Dec 05 06:09:09 crc kubenswrapper[4742]: I1205 06:09:09.609229 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm" event={"ID":"dbf44717-3f12-426c-9133-ef0dd76cea1a","Type":"ContainerStarted","Data":"6a6d7df0fcc46925567798d199d160a0333b5f01293afbd72a69bb33a7d1b059"} Dec 05 06:09:09 crc kubenswrapper[4742]: I1205 06:09:09.611747 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg" Dec 05 06:09:10 crc kubenswrapper[4742]: I1205 06:09:10.974686 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-6f6696b64-d7l2w" Dec 05 06:09:11 crc kubenswrapper[4742]: I1205 06:09:11.013214 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm" podStartSLOduration=4.665660154 podStartE2EDuration="38.013186311s" podCreationTimestamp="2025-12-05 06:08:33 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.634590263 +0000 UTC m=+991.546725315" lastFinishedPulling="2025-12-05 06:09:08.98211641 +0000 UTC m=+1024.894251472" observedRunningTime="2025-12-05 06:09:09.641869907 +0000 UTC m=+1025.554004979" watchObservedRunningTime="2025-12-05 06:09:11.013186311 +0000 UTC m=+1026.925321413" Dec 05 06:09:14 crc kubenswrapper[4742]: I1205 06:09:14.046763 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-dp5sb" Dec 05 06:09:14 crc kubenswrapper[4742]: I1205 06:09:14.083988 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-2c5fr" Dec 05 06:09:14 crc kubenswrapper[4742]: I1205 06:09:14.320958 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-kbvkg" Dec 05 06:09:14 crc kubenswrapper[4742]: I1205 06:09:14.456928 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-2qgvf" Dec 05 06:09:14 crc kubenswrapper[4742]: I1205 06:09:14.499085 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm" Dec 05 06:09:14 crc kubenswrapper[4742]: I1205 06:09:14.502472 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-mq7jm" Dec 05 06:09:14 crc kubenswrapper[4742]: I1205 06:09:14.636203 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-ml2jd" Dec 05 06:09:14 crc kubenswrapper[4742]: I1205 06:09:14.800221 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" Dec 05 06:09:15 crc kubenswrapper[4742]: I1205 06:09:15.657923 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" event={"ID":"1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f","Type":"ContainerStarted","Data":"633aab4280b8a280896f7c16bc57bd41776d43362e08b970040ec313577d84b8"} Dec 05 06:09:15 crc kubenswrapper[4742]: I1205 06:09:15.686363 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-tqxd6" podStartSLOduration=16.30038558 podStartE2EDuration="41.686339897s" podCreationTimestamp="2025-12-05 06:08:34 +0000 UTC" firstStartedPulling="2025-12-05 06:08:35.838744511 +0000 UTC m=+991.750879573" lastFinishedPulling="2025-12-05 06:09:01.224698828 +0000 UTC m=+1017.136833890" observedRunningTime="2025-12-05 06:09:15.683811511 +0000 UTC m=+1031.595946613" watchObservedRunningTime="2025-12-05 06:09:15.686339897 +0000 UTC m=+1031.598474969" Dec 05 06:09:20 crc kubenswrapper[4742]: I1205 06:09:20.091490 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-758b7cbd9c-4bwlv" Dec 05 06:09:20 crc kubenswrapper[4742]: I1205 06:09:20.476710 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879fngsq9" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.305629 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k6klm"] Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.307551 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-k6klm" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.310826 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-tgnm7" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.310913 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.311361 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.311418 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.319728 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k6klm"] Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.383960 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hqv9s"] Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.385880 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.397159 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.407812 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hqv9s"] Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.445577 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp9px\" (UniqueName: \"kubernetes.io/projected/c8b5571b-2c4d-4d16-9b15-78fb3560c257-kube-api-access-sp9px\") pod \"dnsmasq-dns-675f4bcbfc-k6klm\" (UID: \"c8b5571b-2c4d-4d16-9b15-78fb3560c257\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k6klm" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.445642 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8b5571b-2c4d-4d16-9b15-78fb3560c257-config\") pod \"dnsmasq-dns-675f4bcbfc-k6klm\" (UID: \"c8b5571b-2c4d-4d16-9b15-78fb3560c257\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k6klm" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.547297 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b4bd57b-6c52-4568-93c1-7679338bf8b0-config\") pod \"dnsmasq-dns-78dd6ddcc-hqv9s\" (UID: \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.547454 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftsqx\" (UniqueName: \"kubernetes.io/projected/5b4bd57b-6c52-4568-93c1-7679338bf8b0-kube-api-access-ftsqx\") pod \"dnsmasq-dns-78dd6ddcc-hqv9s\" (UID: \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.547593 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp9px\" (UniqueName: \"kubernetes.io/projected/c8b5571b-2c4d-4d16-9b15-78fb3560c257-kube-api-access-sp9px\") pod \"dnsmasq-dns-675f4bcbfc-k6klm\" (UID: \"c8b5571b-2c4d-4d16-9b15-78fb3560c257\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k6klm" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.547697 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8b5571b-2c4d-4d16-9b15-78fb3560c257-config\") pod \"dnsmasq-dns-675f4bcbfc-k6klm\" (UID: \"c8b5571b-2c4d-4d16-9b15-78fb3560c257\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k6klm" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.547804 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b4bd57b-6c52-4568-93c1-7679338bf8b0-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-hqv9s\" (UID: \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s" Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.549254 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8b5571b-2c4d-4d16-9b15-78fb3560c257-config\") pod \"dnsmasq-dns-675f4bcbfc-k6klm\" (UID: \"c8b5571b-2c4d-4d16-9b15-78fb3560c257\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k6klm" Dec 05 
Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.566674 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp9px\" (UniqueName: \"kubernetes.io/projected/c8b5571b-2c4d-4d16-9b15-78fb3560c257-kube-api-access-sp9px\") pod \"dnsmasq-dns-675f4bcbfc-k6klm\" (UID: \"c8b5571b-2c4d-4d16-9b15-78fb3560c257\") " pod="openstack/dnsmasq-dns-675f4bcbfc-k6klm"
Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.648889 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b4bd57b-6c52-4568-93c1-7679338bf8b0-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-hqv9s\" (UID: \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s"
Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.649315 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b4bd57b-6c52-4568-93c1-7679338bf8b0-config\") pod \"dnsmasq-dns-78dd6ddcc-hqv9s\" (UID: \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s"
Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.649363 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftsqx\" (UniqueName: \"kubernetes.io/projected/5b4bd57b-6c52-4568-93c1-7679338bf8b0-kube-api-access-ftsqx\") pod \"dnsmasq-dns-78dd6ddcc-hqv9s\" (UID: \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s"
Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.649787 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b4bd57b-6c52-4568-93c1-7679338bf8b0-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-hqv9s\" (UID: \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s"
Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.650557 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b4bd57b-6c52-4568-93c1-7679338bf8b0-config\") pod \"dnsmasq-dns-78dd6ddcc-hqv9s\" (UID: \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s"
Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.673510 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-k6klm"
Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.674015 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftsqx\" (UniqueName: \"kubernetes.io/projected/5b4bd57b-6c52-4568-93c1-7679338bf8b0-kube-api-access-ftsqx\") pod \"dnsmasq-dns-78dd6ddcc-hqv9s\" (UID: \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s"
Dec 05 06:09:37 crc kubenswrapper[4742]: I1205 06:09:37.704745 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s"
Dec 05 06:09:38 crc kubenswrapper[4742]: I1205 06:09:38.167738 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k6klm"]
Dec 05 06:09:38 crc kubenswrapper[4742]: I1205 06:09:38.176276 4742 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 06:09:38 crc kubenswrapper[4742]: I1205 06:09:38.240192 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hqv9s"]
Dec 05 06:09:38 crc kubenswrapper[4742]: I1205 06:09:38.870013 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-k6klm" event={"ID":"c8b5571b-2c4d-4d16-9b15-78fb3560c257","Type":"ContainerStarted","Data":"ac5cccea903608d4dcfb60a1d7388925cd824b189a74664535e878f91c444768"}
Dec 05 06:09:38 crc kubenswrapper[4742]: I1205 06:09:38.871292 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s" event={"ID":"5b4bd57b-6c52-4568-93c1-7679338bf8b0","Type":"ContainerStarted","Data":"32dea309050f6d07461c3de4afe23e2008889ed80f36b2e2cbc40022cf39f341"}
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.365624 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k6klm"]
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.393852 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-2bzx4"]
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.398430 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-2bzx4"
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.418207 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-2bzx4"]
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.491665 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7df0e37d-ee7e-4e3f-a797-703bc6b39545-config\") pod \"dnsmasq-dns-666b6646f7-2bzx4\" (UID: \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\") " pod="openstack/dnsmasq-dns-666b6646f7-2bzx4"
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.491788 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7df0e37d-ee7e-4e3f-a797-703bc6b39545-dns-svc\") pod \"dnsmasq-dns-666b6646f7-2bzx4\" (UID: \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\") " pod="openstack/dnsmasq-dns-666b6646f7-2bzx4"
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.491814 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpc56\" (UniqueName: \"kubernetes.io/projected/7df0e37d-ee7e-4e3f-a797-703bc6b39545-kube-api-access-cpc56\") pod \"dnsmasq-dns-666b6646f7-2bzx4\" (UID: \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\") " pod="openstack/dnsmasq-dns-666b6646f7-2bzx4"
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.593471 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpc56\" (UniqueName: \"kubernetes.io/projected/7df0e37d-ee7e-4e3f-a797-703bc6b39545-kube-api-access-cpc56\") pod \"dnsmasq-dns-666b6646f7-2bzx4\" (UID: \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\") " pod="openstack/dnsmasq-dns-666b6646f7-2bzx4"
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.593754 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7df0e37d-ee7e-4e3f-a797-703bc6b39545-dns-svc\") pod \"dnsmasq-dns-666b6646f7-2bzx4\" (UID: \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\") " pod="openstack/dnsmasq-dns-666b6646f7-2bzx4"
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.593844 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7df0e37d-ee7e-4e3f-a797-703bc6b39545-config\") pod \"dnsmasq-dns-666b6646f7-2bzx4\" (UID: \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\") " pod="openstack/dnsmasq-dns-666b6646f7-2bzx4"
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.594586 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7df0e37d-ee7e-4e3f-a797-703bc6b39545-config\") pod \"dnsmasq-dns-666b6646f7-2bzx4\" (UID: \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\") " pod="openstack/dnsmasq-dns-666b6646f7-2bzx4"
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.594624 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7df0e37d-ee7e-4e3f-a797-703bc6b39545-dns-svc\") pod \"dnsmasq-dns-666b6646f7-2bzx4\" (UID: \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\") " pod="openstack/dnsmasq-dns-666b6646f7-2bzx4"
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.625854 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpc56\" (UniqueName: \"kubernetes.io/projected/7df0e37d-ee7e-4e3f-a797-703bc6b39545-kube-api-access-cpc56\") pod \"dnsmasq-dns-666b6646f7-2bzx4\" (UID: \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\") " pod="openstack/dnsmasq-dns-666b6646f7-2bzx4"
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.633095 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hqv9s"]
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.675583 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-89nxv"]
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.676756 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-89nxv"
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.687538 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-89nxv"]
Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.740416 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-2bzx4"
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-2bzx4" Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.799165 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-config\") pod \"dnsmasq-dns-57d769cc4f-89nxv\" (UID: \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.799267 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwln2\" (UniqueName: \"kubernetes.io/projected/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-kube-api-access-xwln2\") pod \"dnsmasq-dns-57d769cc4f-89nxv\" (UID: \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.799293 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-89nxv\" (UID: \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.900158 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-config\") pod \"dnsmasq-dns-57d769cc4f-89nxv\" (UID: \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.900233 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwln2\" (UniqueName: \"kubernetes.io/projected/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-kube-api-access-xwln2\") pod \"dnsmasq-dns-57d769cc4f-89nxv\" (UID: \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.900251 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-89nxv\" (UID: \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.903223 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-config\") pod \"dnsmasq-dns-57d769cc4f-89nxv\" (UID: \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.905451 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-89nxv\" (UID: \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\") " pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" Dec 05 06:09:40 crc kubenswrapper[4742]: I1205 06:09:40.936604 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwln2\" (UniqueName: \"kubernetes.io/projected/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-kube-api-access-xwln2\") pod \"dnsmasq-dns-57d769cc4f-89nxv\" (UID: \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\") " 
pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.004830 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.241889 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-2bzx4"] Dec 05 06:09:41 crc kubenswrapper[4742]: W1205 06:09:41.244351 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7df0e37d_ee7e_4e3f_a797_703bc6b39545.slice/crio-b57b540de49dc6096b9202199340d4f4f5f2d89f4b6d1d74a0c469e188bd395d WatchSource:0}: Error finding container b57b540de49dc6096b9202199340d4f4f5f2d89f4b6d1d74a0c469e188bd395d: Status 404 returned error can't find the container with id b57b540de49dc6096b9202199340d4f4f5f2d89f4b6d1d74a0c469e188bd395d Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.509315 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.510724 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.512799 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.512967 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.515491 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.515620 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.515854 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.516097 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-7lkq8" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.516274 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.528921 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.594039 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-89nxv"] Dec 05 06:09:41 crc kubenswrapper[4742]: W1205 06:09:41.611460 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba3490c6_4f5c_43fc_bb5a_f60ab3dc12c2.slice/crio-f2ae7112a101490a8573a89e68a26b07deaf551064bd1d3cdc4ff6824f104423 WatchSource:0}: Error finding container f2ae7112a101490a8573a89e68a26b07deaf551064bd1d3cdc4ff6824f104423: Status 404 returned error can't find the container with id f2ae7112a101490a8573a89e68a26b07deaf551064bd1d3cdc4ff6824f104423 Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.635357 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.635673 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdk76\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-kube-api-access-cdk76\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.635722 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.635744 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.635759 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.635823 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.635844 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.635859 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.635931 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.635981 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-pod-info\") pod 
\"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.636022 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.737830 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdk76\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-kube-api-access-cdk76\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.737878 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.737898 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.737913 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.737962 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.737986 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.738002 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.738018 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.738032 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.738164 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.738304 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.738694 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.738786 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.738798 4742 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.739465 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.739509 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.740586 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.745317 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc 
kubenswrapper[4742]: I1205 06:09:41.750761 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0"
Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.750900 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0"
Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.755086 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0"
Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.755597 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdk76\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-kube-api-access-cdk76\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0"
Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.807755 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-server-0\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") " pod="openstack/rabbitmq-server-0"
Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.819133 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.820652 4742 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.826901 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.827313 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.827458 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.827559 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.827657 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.827759 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.827918 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.828064 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-rxbhq" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.845689 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.947146 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wd582\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-kube-api-access-wd582\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.947204 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.947234 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.947252 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7b5d8165-e06e-4600-9cab-9cf84c010725-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.947280 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.947293 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7b5d8165-e06e-4600-9cab-9cf84c010725-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.947315 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.947330 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.947350 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.947373 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.947393 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.980872 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-2bzx4" event={"ID":"7df0e37d-ee7e-4e3f-a797-703bc6b39545","Type":"ContainerStarted","Data":"b57b540de49dc6096b9202199340d4f4f5f2d89f4b6d1d74a0c469e188bd395d"} Dec 05 06:09:41 crc kubenswrapper[4742]: I1205 06:09:41.983920 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" event={"ID":"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2","Type":"ContainerStarted","Data":"f2ae7112a101490a8573a89e68a26b07deaf551064bd1d3cdc4ff6824f104423"} Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.049109 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.049143 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7b5d8165-e06e-4600-9cab-9cf84c010725-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.049165 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.049181 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.049203 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.049232 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.049257 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.049290 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wd582\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-kube-api-access-wd582\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.049322 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.049351 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.049368 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/7b5d8165-e06e-4600-9cab-9cf84c010725-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.050175 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.050767 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.053342 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.053425 4742 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.054294 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7b5d8165-e06e-4600-9cab-9cf84c010725-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.055901 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7b5d8165-e06e-4600-9cab-9cf84c010725-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.056624 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.057190 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.057286 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.058126 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.078379 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wd582\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-kube-api-access-wd582\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.086073 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.173953 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.406724 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 06:09:42 crc kubenswrapper[4742]: W1205 06:09:42.414510 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd6b096f4_483e_48c5_a3e1_a178c0c5ae6e.slice/crio-db1336eae52cbd8fcb3aa0e2dd712d4cb75e4737d79ab959c9a2d0267b82f8e4 WatchSource:0}: Error finding container db1336eae52cbd8fcb3aa0e2dd712d4cb75e4737d79ab959c9a2d0267b82f8e4: Status 404 returned error can't find the container with id db1336eae52cbd8fcb3aa0e2dd712d4cb75e4737d79ab959c9a2d0267b82f8e4 Dec 05 06:09:42 crc kubenswrapper[4742]: I1205 06:09:42.734269 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 06:09:42 crc kubenswrapper[4742]: W1205 06:09:42.745701 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b5d8165_e06e_4600_9cab_9cf84c010725.slice/crio-048aa81f305557f2b2564b9bcfec1e2440cbf9794ec21c7ad3b2100a54b456cc WatchSource:0}: Error finding container 048aa81f305557f2b2564b9bcfec1e2440cbf9794ec21c7ad3b2100a54b456cc: Status 404 returned error can't find the container with id 048aa81f305557f2b2564b9bcfec1e2440cbf9794ec21c7ad3b2100a54b456cc Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.004379 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7b5d8165-e06e-4600-9cab-9cf84c010725","Type":"ContainerStarted","Data":"048aa81f305557f2b2564b9bcfec1e2440cbf9794ec21c7ad3b2100a54b456cc"} Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.023473 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e","Type":"ContainerStarted","Data":"db1336eae52cbd8fcb3aa0e2dd712d4cb75e4737d79ab959c9a2d0267b82f8e4"} Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.033599 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.041808 4742 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.050751 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.055609 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.056041 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-hv8b9" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.056239 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.058299 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.063077 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.174771 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-config-data-default\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.175761 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ba4170-0240-42d9-85f4-cf3587f39f02-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.175789 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-kolla-config\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.175804 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.175840 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbqk6\" (UniqueName: \"kubernetes.io/projected/c4ba4170-0240-42d9-85f4-cf3587f39f02-kube-api-access-kbqk6\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.175866 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c4ba4170-0240-42d9-85f4-cf3587f39f02-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.175896 4742 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.175917 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ba4170-0240-42d9-85f4-cf3587f39f02-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.277765 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ba4170-0240-42d9-85f4-cf3587f39f02-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.277825 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-kolla-config\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.278678 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-kolla-config\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.278730 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.278774 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbqk6\" (UniqueName: \"kubernetes.io/projected/c4ba4170-0240-42d9-85f4-cf3587f39f02-kube-api-access-kbqk6\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.278802 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c4ba4170-0240-42d9-85f4-cf3587f39f02-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.278843 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.278865 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ba4170-0240-42d9-85f4-cf3587f39f02-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: 
\"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.278896 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-config-data-default\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.279647 4742 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.293709 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-config-data-default\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.294799 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.296382 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c4ba4170-0240-42d9-85f4-cf3587f39f02-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.296524 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ba4170-0240-42d9-85f4-cf3587f39f02-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.302404 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbqk6\" (UniqueName: \"kubernetes.io/projected/c4ba4170-0240-42d9-85f4-cf3587f39f02-kube-api-access-kbqk6\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.304774 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ba4170-0240-42d9-85f4-cf3587f39f02-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.315716 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") " pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.408971 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 06:09:43 crc kubenswrapper[4742]: I1205 06:09:43.892663 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 06:09:43 crc kubenswrapper[4742]: W1205 06:09:43.904838 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4ba4170_0240_42d9_85f4_cf3587f39f02.slice/crio-84b7e22e1ee9f346c9eedcc2067377b3434c6d31dc52ee00581e2a6a059af766 WatchSource:0}: Error finding container 84b7e22e1ee9f346c9eedcc2067377b3434c6d31dc52ee00581e2a6a059af766: Status 404 returned error can't find the container with id 84b7e22e1ee9f346c9eedcc2067377b3434c6d31dc52ee00581e2a6a059af766 Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.043729 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c4ba4170-0240-42d9-85f4-cf3587f39f02","Type":"ContainerStarted","Data":"84b7e22e1ee9f346c9eedcc2067377b3434c6d31dc52ee00581e2a6a059af766"} Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.301065 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.302496 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.305000 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-kjrtm" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.305309 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.305452 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.310224 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.328715 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.399546 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85632fad-1ab6-495e-9049-6b5dad9cc955-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.399586 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.399621 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5f6j\" (UniqueName: \"kubernetes.io/projected/85632fad-1ab6-495e-9049-6b5dad9cc955-kube-api-access-s5f6j\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.399650 
4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.399671 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/85632fad-1ab6-495e-9049-6b5dad9cc955-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.399699 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.399720 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/85632fad-1ab6-495e-9049-6b5dad9cc955-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.399769 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.501678 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.501717 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/85632fad-1ab6-495e-9049-6b5dad9cc955-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.501746 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.504998 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/85632fad-1ab6-495e-9049-6b5dad9cc955-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.505157 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.505216 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85632fad-1ab6-495e-9049-6b5dad9cc955-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.505244 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.505296 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5f6j\" (UniqueName: \"kubernetes.io/projected/85632fad-1ab6-495e-9049-6b5dad9cc955-kube-api-access-s5f6j\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.506592 4742 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.508562 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.509554 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.509765 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.515619 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/85632fad-1ab6-495e-9049-6b5dad9cc955-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.520238 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/85632fad-1ab6-495e-9049-6b5dad9cc955-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.531875 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5f6j\" (UniqueName: \"kubernetes.io/projected/85632fad-1ab6-495e-9049-6b5dad9cc955-kube-api-access-s5f6j\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.560125 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/85632fad-1ab6-495e-9049-6b5dad9cc955-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.599313 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") " pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.623335 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.716606 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.717853 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.730317 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.730531 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.730713 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-m8wv4" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.742572 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.821497 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ec392288-7e80-4956-836c-d400d4460ebc-kolla-config\") pod \"memcached-0\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.821550 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ec392288-7e80-4956-836c-d400d4460ebc-config-data\") pod \"memcached-0\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.821625 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec392288-7e80-4956-836c-d400d4460ebc-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") 
" pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.821646 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdg88\" (UniqueName: \"kubernetes.io/projected/ec392288-7e80-4956-836c-d400d4460ebc-kube-api-access-xdg88\") pod \"memcached-0\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.821697 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec392288-7e80-4956-836c-d400d4460ebc-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.923016 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec392288-7e80-4956-836c-d400d4460ebc-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.923454 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdg88\" (UniqueName: \"kubernetes.io/projected/ec392288-7e80-4956-836c-d400d4460ebc-kube-api-access-xdg88\") pod \"memcached-0\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.923571 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec392288-7e80-4956-836c-d400d4460ebc-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.923612 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ec392288-7e80-4956-836c-d400d4460ebc-kolla-config\") pod \"memcached-0\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.923643 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ec392288-7e80-4956-836c-d400d4460ebc-config-data\") pod \"memcached-0\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.924589 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ec392288-7e80-4956-836c-d400d4460ebc-kolla-config\") pod \"memcached-0\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.924695 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ec392288-7e80-4956-836c-d400d4460ebc-config-data\") pod \"memcached-0\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.943966 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec392288-7e80-4956-836c-d400d4460ebc-memcached-tls-certs\") pod \"memcached-0\" (UID: 
\"ec392288-7e80-4956-836c-d400d4460ebc\") " pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.946026 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec392288-7e80-4956-836c-d400d4460ebc-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " pod="openstack/memcached-0" Dec 05 06:09:44 crc kubenswrapper[4742]: I1205 06:09:44.958474 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdg88\" (UniqueName: \"kubernetes.io/projected/ec392288-7e80-4956-836c-d400d4460ebc-kube-api-access-xdg88\") pod \"memcached-0\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " pod="openstack/memcached-0" Dec 05 06:09:45 crc kubenswrapper[4742]: I1205 06:09:45.051310 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 06:09:45 crc kubenswrapper[4742]: W1205 06:09:45.063603 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85632fad_1ab6_495e_9049_6b5dad9cc955.slice/crio-7e696c06cbb0b2c6f72506516b0181c7f54cff155f629e3b9d17eecfd4d56382 WatchSource:0}: Error finding container 7e696c06cbb0b2c6f72506516b0181c7f54cff155f629e3b9d17eecfd4d56382: Status 404 returned error can't find the container with id 7e696c06cbb0b2c6f72506516b0181c7f54cff155f629e3b9d17eecfd4d56382 Dec 05 06:09:45 crc kubenswrapper[4742]: I1205 06:09:45.075077 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 06:09:45 crc kubenswrapper[4742]: I1205 06:09:45.564630 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 06:09:45 crc kubenswrapper[4742]: W1205 06:09:45.576667 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec392288_7e80_4956_836c_d400d4460ebc.slice/crio-0f4a8968ef0375fccb1d5feb3cd34d4bdfb99d2a64520a10ccc06f1e057b030e WatchSource:0}: Error finding container 0f4a8968ef0375fccb1d5feb3cd34d4bdfb99d2a64520a10ccc06f1e057b030e: Status 404 returned error can't find the container with id 0f4a8968ef0375fccb1d5feb3cd34d4bdfb99d2a64520a10ccc06f1e057b030e Dec 05 06:09:46 crc kubenswrapper[4742]: I1205 06:09:46.090412 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"ec392288-7e80-4956-836c-d400d4460ebc","Type":"ContainerStarted","Data":"0f4a8968ef0375fccb1d5feb3cd34d4bdfb99d2a64520a10ccc06f1e057b030e"} Dec 05 06:09:46 crc kubenswrapper[4742]: I1205 06:09:46.091622 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"85632fad-1ab6-495e-9049-6b5dad9cc955","Type":"ContainerStarted","Data":"7e696c06cbb0b2c6f72506516b0181c7f54cff155f629e3b9d17eecfd4d56382"} Dec 05 06:09:46 crc kubenswrapper[4742]: I1205 06:09:46.450840 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 06:09:46 crc kubenswrapper[4742]: I1205 06:09:46.451815 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 06:09:46 crc kubenswrapper[4742]: I1205 06:09:46.459461 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 06:09:46 crc kubenswrapper[4742]: I1205 06:09:46.468957 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-jdgmt" Dec 05 06:09:46 crc kubenswrapper[4742]: I1205 06:09:46.553580 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgdqp\" (UniqueName: \"kubernetes.io/projected/b062c9ad-05c3-4a95-a880-e3b7ccfff3de-kube-api-access-bgdqp\") pod \"kube-state-metrics-0\" (UID: \"b062c9ad-05c3-4a95-a880-e3b7ccfff3de\") " pod="openstack/kube-state-metrics-0" Dec 05 06:09:46 crc kubenswrapper[4742]: I1205 06:09:46.655480 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgdqp\" (UniqueName: \"kubernetes.io/projected/b062c9ad-05c3-4a95-a880-e3b7ccfff3de-kube-api-access-bgdqp\") pod \"kube-state-metrics-0\" (UID: \"b062c9ad-05c3-4a95-a880-e3b7ccfff3de\") " pod="openstack/kube-state-metrics-0" Dec 05 06:09:46 crc kubenswrapper[4742]: I1205 06:09:46.699377 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgdqp\" (UniqueName: \"kubernetes.io/projected/b062c9ad-05c3-4a95-a880-e3b7ccfff3de-kube-api-access-bgdqp\") pod \"kube-state-metrics-0\" (UID: \"b062c9ad-05c3-4a95-a880-e3b7ccfff3de\") " pod="openstack/kube-state-metrics-0" Dec 05 06:09:46 crc kubenswrapper[4742]: I1205 06:09:46.790945 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.092041 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-9n84z"] Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.094921 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.100956 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.101169 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-pbppv" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.104670 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.109166 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9n84z"] Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.113465 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-tgnp6"] Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.120323 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.139557 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-tgnp6"] Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.224947 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-scripts\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.224996 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-etc-ovs\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.225018 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-run\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.225041 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxmhq\" (UniqueName: \"kubernetes.io/projected/504b6b10-062b-4d3c-8202-fcfd97bc57aa-kube-api-access-xxmhq\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.225077 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-ovn-controller-tls-certs\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.225115 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-lib\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.225133 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-run\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.225158 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-run-ovn\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.225184 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gg5h\" (UniqueName: 
\"kubernetes.io/projected/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-kube-api-access-7gg5h\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.225208 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-log-ovn\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.225230 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/504b6b10-062b-4d3c-8202-fcfd97bc57aa-scripts\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.225317 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-log\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.225343 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-combined-ca-bundle\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.326321 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-lib\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.326373 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-run\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.326404 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-run-ovn\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.326431 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gg5h\" (UniqueName: \"kubernetes.io/projected/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-kube-api-access-7gg5h\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.326455 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-log-ovn\") pod \"ovn-controller-9n84z\" (UID: 
\"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.326477 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/504b6b10-062b-4d3c-8202-fcfd97bc57aa-scripts\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.326501 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-log\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.326521 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-combined-ca-bundle\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.326545 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-scripts\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.326562 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-etc-ovs\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.326576 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-run\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.326593 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxmhq\" (UniqueName: \"kubernetes.io/projected/504b6b10-062b-4d3c-8202-fcfd97bc57aa-kube-api-access-xxmhq\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.326610 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-ovn-controller-tls-certs\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.327350 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-log\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.327411 4742 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-lib\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.327472 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-etc-ovs\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.327503 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-log-ovn\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.327529 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-run\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.327536 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-run\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.327487 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-run-ovn\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.328973 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-scripts\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.329840 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/504b6b10-062b-4d3c-8202-fcfd97bc57aa-scripts\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.339831 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-ovn-controller-tls-certs\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.339923 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-combined-ca-bundle\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.343179 4742 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gg5h\" (UniqueName: \"kubernetes.io/projected/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-kube-api-access-7gg5h\") pod \"ovn-controller-9n84z\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.343592 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxmhq\" (UniqueName: \"kubernetes.io/projected/504b6b10-062b-4d3c-8202-fcfd97bc57aa-kube-api-access-xxmhq\") pod \"ovn-controller-ovs-tgnp6\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.417942 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9n84z" Dec 05 06:09:50 crc kubenswrapper[4742]: I1205 06:09:50.441199 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.324095 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.326352 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.328830 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.329083 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.329195 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-bdmx5" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.329481 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.329700 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.340141 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.476124 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.476192 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a88c6674-8c2f-4868-8839-1ec313fbfe8e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.476226 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " 
pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.476250 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.476542 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgpz8\" (UniqueName: \"kubernetes.io/projected/a88c6674-8c2f-4868-8839-1ec313fbfe8e-kube-api-access-fgpz8\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.476597 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a88c6674-8c2f-4868-8839-1ec313fbfe8e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.476619 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.476689 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a88c6674-8c2f-4868-8839-1ec313fbfe8e-config\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.527274 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.528854 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.530950 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.530985 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.531452 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.532915 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-rqrkw" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.554159 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.578578 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.578623 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.578672 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgpz8\" (UniqueName: \"kubernetes.io/projected/a88c6674-8c2f-4868-8839-1ec313fbfe8e-kube-api-access-fgpz8\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.578699 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a88c6674-8c2f-4868-8839-1ec313fbfe8e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.578723 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.578763 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a88c6674-8c2f-4868-8839-1ec313fbfe8e-config\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.578817 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.578865 4742 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a88c6674-8c2f-4868-8839-1ec313fbfe8e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.578963 4742 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.579766 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a88c6674-8c2f-4868-8839-1ec313fbfe8e-config\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.580024 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a88c6674-8c2f-4868-8839-1ec313fbfe8e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.583396 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a88c6674-8c2f-4868-8839-1ec313fbfe8e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.584138 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.590270 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.591240 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.594877 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgpz8\" (UniqueName: \"kubernetes.io/projected/a88c6674-8c2f-4868-8839-1ec313fbfe8e-kube-api-access-fgpz8\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.602273 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") " pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc 
kubenswrapper[4742]: I1205 06:09:53.657769 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.693970 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.694121 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.694156 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.694180 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7scpt\" (UniqueName: \"kubernetes.io/projected/b8e993d8-0221-4214-b00a-ca745e716bbe-kube-api-access-7scpt\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.694224 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b8e993d8-0221-4214-b00a-ca745e716bbe-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.694245 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.694268 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8e993d8-0221-4214-b00a-ca745e716bbe-config\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.694312 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8e993d8-0221-4214-b00a-ca745e716bbe-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.795844 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: 
\"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.795896 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.795918 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7scpt\" (UniqueName: \"kubernetes.io/projected/b8e993d8-0221-4214-b00a-ca745e716bbe-kube-api-access-7scpt\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.795953 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.795968 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b8e993d8-0221-4214-b00a-ca745e716bbe-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.795985 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8e993d8-0221-4214-b00a-ca745e716bbe-config\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.796020 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8e993d8-0221-4214-b00a-ca745e716bbe-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.796042 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.796179 4742 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.797256 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8e993d8-0221-4214-b00a-ca745e716bbe-config\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.797532 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/b8e993d8-0221-4214-b00a-ca745e716bbe-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.797547 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8e993d8-0221-4214-b00a-ca745e716bbe-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.805762 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.806338 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.806446 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.821000 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.823911 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7scpt\" (UniqueName: \"kubernetes.io/projected/b8e993d8-0221-4214-b00a-ca745e716bbe-kube-api-access-7scpt\") pod \"ovsdbserver-sb-0\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " pod="openstack/ovsdbserver-sb-0" Dec 05 06:09:53 crc kubenswrapper[4742]: I1205 06:09:53.856950 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 06:10:11 crc kubenswrapper[4742]: E1205 06:10:11.324576 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Dec 05 06:10:11 crc kubenswrapper[4742]: E1205 06:10:11.325589 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n689h58fh95h9fh96h7bh86h95h6dh654h5d5h648hf7h675h54fh578h658hdbh66ch679h66h55bh569h676hd8hbfh554h649h6bh5dfhc5h9cq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xdg88,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
memcached-0_openstack(ec392288-7e80-4956-836c-d400d4460ebc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 06:10:11 crc kubenswrapper[4742]: E1205 06:10:11.328623 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="ec392288-7e80-4956-836c-d400d4460ebc" Dec 05 06:10:12 crc kubenswrapper[4742]: E1205 06:10:12.365767 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="ec392288-7e80-4956-836c-d400d4460ebc" Dec 05 06:10:13 crc kubenswrapper[4742]: E1205 06:10:13.196154 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 05 06:10:13 crc kubenswrapper[4742]: E1205 06:10:13.196414 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s5f6j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(85632fad-1ab6-495e-9049-6b5dad9cc955): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 06:10:13 crc kubenswrapper[4742]: 
E1205 06:10:13.197872 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="85632fad-1ab6-495e-9049-6b5dad9cc955" Dec 05 06:10:13 crc kubenswrapper[4742]: E1205 06:10:13.372774 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="85632fad-1ab6-495e-9049-6b5dad9cc955" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.196905 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.197648 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cdk76,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(d6b096f4-483e-48c5-a3e1-a178c0c5ae6e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.199175 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.201527 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.201728 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kbqk6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(c4ba4170-0240-42d9-85f4-cf3587f39f02): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.203403 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="c4ba4170-0240-42d9-85f4-cf3587f39f02" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.212927 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.213150 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 
20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wd582,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(7b5d8165-e06e-4600-9cab-9cf84c010725): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.214381 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="7b5d8165-e06e-4600-9cab-9cf84c010725" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.378357 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="c4ba4170-0240-42d9-85f4-cf3587f39f02" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.379393 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="7b5d8165-e06e-4600-9cab-9cf84c010725" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.379587 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" 
pod="openstack/rabbitmq-server-0" podUID="d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.935375 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.935570 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sp9px,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-k6klm_openstack(c8b5571b-2c4d-4d16-9b15-78fb3560c257): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.936782 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-k6klm" podUID="c8b5571b-2c4d-4d16-9b15-78fb3560c257" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.946165 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.946353 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d 
--hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ftsqx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-hqv9s_openstack(5b4bd57b-6c52-4568-93c1-7679338bf8b0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.947550 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s" podUID="5b4bd57b-6c52-4568-93c1-7679338bf8b0" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.979690 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.979876 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xwln2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-89nxv_openstack(ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.981219 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" podUID="ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.992464 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.992642 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cpc56,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-2bzx4_openstack(7df0e37d-ee7e-4e3f-a797-703bc6b39545): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 06:10:14 crc kubenswrapper[4742]: E1205 06:10:14.994041 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-2bzx4" podUID="7df0e37d-ee7e-4e3f-a797-703bc6b39545" Dec 05 06:10:15 crc kubenswrapper[4742]: E1205 06:10:15.386536 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" podUID="ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2" Dec 05 06:10:15 crc kubenswrapper[4742]: E1205 06:10:15.386961 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-2bzx4" podUID="7df0e37d-ee7e-4e3f-a797-703bc6b39545" Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.451682 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9n84z"] Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.460438 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 06:10:15 crc kubenswrapper[4742]: W1205 06:10:15.501600 4742 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5df8784_b63d_41b7_a542_dcf53ea6cc5e.slice/crio-f917511ece062ed88b054cee51df7c07882cb08aec93d7338f27131f1bc76bf5 WatchSource:0}: Error finding container f917511ece062ed88b054cee51df7c07882cb08aec93d7338f27131f1bc76bf5: Status 404 returned error can't find the container with id f917511ece062ed88b054cee51df7c07882cb08aec93d7338f27131f1bc76bf5 Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.590301 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.694813 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 06:10:15 crc kubenswrapper[4742]: W1205 06:10:15.699906 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda88c6674_8c2f_4868_8839_1ec313fbfe8e.slice/crio-0bc73304c447d34eca790cf093a15dc151dbf2bae8378b7b5a87aaf6245d1d9f WatchSource:0}: Error finding container 0bc73304c447d34eca790cf093a15dc151dbf2bae8378b7b5a87aaf6245d1d9f: Status 404 returned error can't find the container with id 0bc73304c447d34eca790cf093a15dc151dbf2bae8378b7b5a87aaf6245d1d9f Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.759940 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-k6klm" Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.787213 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s" Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.925357 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8b5571b-2c4d-4d16-9b15-78fb3560c257-config\") pod \"c8b5571b-2c4d-4d16-9b15-78fb3560c257\" (UID: \"c8b5571b-2c4d-4d16-9b15-78fb3560c257\") " Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.925456 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b4bd57b-6c52-4568-93c1-7679338bf8b0-dns-svc\") pod \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\" (UID: \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\") " Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.925518 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sp9px\" (UniqueName: \"kubernetes.io/projected/c8b5571b-2c4d-4d16-9b15-78fb3560c257-kube-api-access-sp9px\") pod \"c8b5571b-2c4d-4d16-9b15-78fb3560c257\" (UID: \"c8b5571b-2c4d-4d16-9b15-78fb3560c257\") " Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.925604 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftsqx\" (UniqueName: \"kubernetes.io/projected/5b4bd57b-6c52-4568-93c1-7679338bf8b0-kube-api-access-ftsqx\") pod \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\" (UID: \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\") " Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.925645 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b4bd57b-6c52-4568-93c1-7679338bf8b0-config\") pod \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\" (UID: \"5b4bd57b-6c52-4568-93c1-7679338bf8b0\") " Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.925922 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/5b4bd57b-6c52-4568-93c1-7679338bf8b0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5b4bd57b-6c52-4568-93c1-7679338bf8b0" (UID: "5b4bd57b-6c52-4568-93c1-7679338bf8b0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.925940 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8b5571b-2c4d-4d16-9b15-78fb3560c257-config" (OuterVolumeSpecName: "config") pod "c8b5571b-2c4d-4d16-9b15-78fb3560c257" (UID: "c8b5571b-2c4d-4d16-9b15-78fb3560c257"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.926506 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b4bd57b-6c52-4568-93c1-7679338bf8b0-config" (OuterVolumeSpecName: "config") pod "5b4bd57b-6c52-4568-93c1-7679338bf8b0" (UID: "5b4bd57b-6c52-4568-93c1-7679338bf8b0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.933667 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8b5571b-2c4d-4d16-9b15-78fb3560c257-kube-api-access-sp9px" (OuterVolumeSpecName: "kube-api-access-sp9px") pod "c8b5571b-2c4d-4d16-9b15-78fb3560c257" (UID: "c8b5571b-2c4d-4d16-9b15-78fb3560c257"). InnerVolumeSpecName "kube-api-access-sp9px". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:15 crc kubenswrapper[4742]: I1205 06:10:15.933709 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b4bd57b-6c52-4568-93c1-7679338bf8b0-kube-api-access-ftsqx" (OuterVolumeSpecName: "kube-api-access-ftsqx") pod "5b4bd57b-6c52-4568-93c1-7679338bf8b0" (UID: "5b4bd57b-6c52-4568-93c1-7679338bf8b0"). InnerVolumeSpecName "kube-api-access-ftsqx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.027140 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b4bd57b-6c52-4568-93c1-7679338bf8b0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.027179 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sp9px\" (UniqueName: \"kubernetes.io/projected/c8b5571b-2c4d-4d16-9b15-78fb3560c257-kube-api-access-sp9px\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.027193 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftsqx\" (UniqueName: \"kubernetes.io/projected/5b4bd57b-6c52-4568-93c1-7679338bf8b0-kube-api-access-ftsqx\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.027205 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b4bd57b-6c52-4568-93c1-7679338bf8b0-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.027217 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c8b5571b-2c4d-4d16-9b15-78fb3560c257-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.396330 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-k6klm" event={"ID":"c8b5571b-2c4d-4d16-9b15-78fb3560c257","Type":"ContainerDied","Data":"ac5cccea903608d4dcfb60a1d7388925cd824b189a74664535e878f91c444768"} Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.396428 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-k6klm" Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.400886 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s" event={"ID":"5b4bd57b-6c52-4568-93c1-7679338bf8b0","Type":"ContainerDied","Data":"32dea309050f6d07461c3de4afe23e2008889ed80f36b2e2cbc40022cf39f341"} Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.400960 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-hqv9s" Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.403350 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9n84z" event={"ID":"b5df8784-b63d-41b7-a542-dcf53ea6cc5e","Type":"ContainerStarted","Data":"f917511ece062ed88b054cee51df7c07882cb08aec93d7338f27131f1bc76bf5"} Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.405555 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"a88c6674-8c2f-4868-8839-1ec313fbfe8e","Type":"ContainerStarted","Data":"0bc73304c447d34eca790cf093a15dc151dbf2bae8378b7b5a87aaf6245d1d9f"} Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.406937 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b062c9ad-05c3-4a95-a880-e3b7ccfff3de","Type":"ContainerStarted","Data":"e9b74aced60cb56306fb19046c4166b10b4d86166857e8f9003cb54ff5aa8359"} Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.409142 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b8e993d8-0221-4214-b00a-ca745e716bbe","Type":"ContainerStarted","Data":"3da10f38100c90d2e73c2c8207528c35dd5293060f371cb44777cbb35f8867fc"} Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.459890 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k6klm"] Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.474517 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-k6klm"] Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.487607 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hqv9s"] Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.494014 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hqv9s"] Dec 05 06:10:16 crc kubenswrapper[4742]: I1205 06:10:16.621493 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-tgnp6"] Dec 05 06:10:16 crc kubenswrapper[4742]: W1205 06:10:16.791310 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod504b6b10_062b_4d3c_8202_fcfd97bc57aa.slice/crio-57559d840c8bf91e31b5c51ae2cd65e01b1158303c7d096915810505d014966a WatchSource:0}: Error finding container 57559d840c8bf91e31b5c51ae2cd65e01b1158303c7d096915810505d014966a: Status 404 returned error can't find the container with id 57559d840c8bf91e31b5c51ae2cd65e01b1158303c7d096915810505d014966a Dec 05 06:10:17 crc kubenswrapper[4742]: I1205 06:10:17.426974 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-tgnp6" event={"ID":"504b6b10-062b-4d3c-8202-fcfd97bc57aa","Type":"ContainerStarted","Data":"57559d840c8bf91e31b5c51ae2cd65e01b1158303c7d096915810505d014966a"} Dec 05 06:10:18 crc kubenswrapper[4742]: I1205 06:10:18.398018 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b4bd57b-6c52-4568-93c1-7679338bf8b0" path="/var/lib/kubelet/pods/5b4bd57b-6c52-4568-93c1-7679338bf8b0/volumes" Dec 05 06:10:18 crc kubenswrapper[4742]: I1205 06:10:18.399332 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8b5571b-2c4d-4d16-9b15-78fb3560c257" path="/var/lib/kubelet/pods/c8b5571b-2c4d-4d16-9b15-78fb3560c257/volumes" Dec 05 06:10:20 crc kubenswrapper[4742]: I1205 06:10:20.452823 4742 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b8e993d8-0221-4214-b00a-ca745e716bbe","Type":"ContainerStarted","Data":"75b9102a45bd7348ee8155d1c3b59cc40301c88e77fb8f3ce1855a075666bb90"} Dec 05 06:10:20 crc kubenswrapper[4742]: I1205 06:10:20.454584 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9n84z" event={"ID":"b5df8784-b63d-41b7-a542-dcf53ea6cc5e","Type":"ContainerStarted","Data":"54edd9b1ddd6e1ba491286ac963077f358b82344ad584ecba6ecb84b4f7da42c"} Dec 05 06:10:20 crc kubenswrapper[4742]: I1205 06:10:20.454760 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-9n84z" Dec 05 06:10:20 crc kubenswrapper[4742]: I1205 06:10:20.460000 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-tgnp6" event={"ID":"504b6b10-062b-4d3c-8202-fcfd97bc57aa","Type":"ContainerStarted","Data":"652a22f9d708ac555b3ac5eb928fa8fe7dbbb5c68552672921a0164adc81ce23"} Dec 05 06:10:20 crc kubenswrapper[4742]: I1205 06:10:20.462125 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"a88c6674-8c2f-4868-8839-1ec313fbfe8e","Type":"ContainerStarted","Data":"9d256f9bac05a4b6bb691eed52a6f2da591190f33538fc2e7323132010170272"} Dec 05 06:10:20 crc kubenswrapper[4742]: I1205 06:10:20.463411 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b062c9ad-05c3-4a95-a880-e3b7ccfff3de","Type":"ContainerStarted","Data":"148e03b7513c06d95dcc0aa7eee3cfaf868afafc063c6ea1ceacb1994d13b688"} Dec 05 06:10:20 crc kubenswrapper[4742]: I1205 06:10:20.464079 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 06:10:20 crc kubenswrapper[4742]: I1205 06:10:20.476218 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-9n84z" podStartSLOduration=26.172230665 podStartE2EDuration="30.476184421s" podCreationTimestamp="2025-12-05 06:09:50 +0000 UTC" firstStartedPulling="2025-12-05 06:10:15.512334661 +0000 UTC m=+1091.424469723" lastFinishedPulling="2025-12-05 06:10:19.816288407 +0000 UTC m=+1095.728423479" observedRunningTime="2025-12-05 06:10:20.470554445 +0000 UTC m=+1096.382689527" watchObservedRunningTime="2025-12-05 06:10:20.476184421 +0000 UTC m=+1096.388319483" Dec 05 06:10:20 crc kubenswrapper[4742]: I1205 06:10:20.486279 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=30.064762601 podStartE2EDuration="34.486259291s" podCreationTimestamp="2025-12-05 06:09:46 +0000 UTC" firstStartedPulling="2025-12-05 06:10:15.512000412 +0000 UTC m=+1091.424135474" lastFinishedPulling="2025-12-05 06:10:19.933497102 +0000 UTC m=+1095.845632164" observedRunningTime="2025-12-05 06:10:20.483466178 +0000 UTC m=+1096.395601250" watchObservedRunningTime="2025-12-05 06:10:20.486259291 +0000 UTC m=+1096.398394363" Dec 05 06:10:21 crc kubenswrapper[4742]: I1205 06:10:21.473209 4742 generic.go:334] "Generic (PLEG): container finished" podID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerID="652a22f9d708ac555b3ac5eb928fa8fe7dbbb5c68552672921a0164adc81ce23" exitCode=0 Dec 05 06:10:21 crc kubenswrapper[4742]: I1205 06:10:21.473430 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-tgnp6" 
event={"ID":"504b6b10-062b-4d3c-8202-fcfd97bc57aa","Type":"ContainerDied","Data":"652a22f9d708ac555b3ac5eb928fa8fe7dbbb5c68552672921a0164adc81ce23"} Dec 05 06:10:23 crc kubenswrapper[4742]: I1205 06:10:23.492634 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b8e993d8-0221-4214-b00a-ca745e716bbe","Type":"ContainerStarted","Data":"3fa7e542679f95885cdd8ab99d9224e870ebcce54df90ffe1a480a639c7703e9"} Dec 05 06:10:23 crc kubenswrapper[4742]: I1205 06:10:23.519741 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=23.804551194 podStartE2EDuration="31.519725632s" podCreationTimestamp="2025-12-05 06:09:52 +0000 UTC" firstStartedPulling="2025-12-05 06:10:15.587264575 +0000 UTC m=+1091.499399637" lastFinishedPulling="2025-12-05 06:10:23.302439003 +0000 UTC m=+1099.214574075" observedRunningTime="2025-12-05 06:10:23.517588237 +0000 UTC m=+1099.429723319" watchObservedRunningTime="2025-12-05 06:10:23.519725632 +0000 UTC m=+1099.431860694" Dec 05 06:10:23 crc kubenswrapper[4742]: I1205 06:10:23.858137 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 05 06:10:23 crc kubenswrapper[4742]: I1205 06:10:23.858499 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 05 06:10:23 crc kubenswrapper[4742]: I1205 06:10:23.929393 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 05 06:10:24 crc kubenswrapper[4742]: I1205 06:10:24.506146 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"a88c6674-8c2f-4868-8839-1ec313fbfe8e","Type":"ContainerStarted","Data":"1c678ad593b1fd73c0db215db0715772580dca8117af0aa42f0c6d499e00b732"} Dec 05 06:10:24 crc kubenswrapper[4742]: I1205 06:10:24.509292 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-tgnp6" event={"ID":"504b6b10-062b-4d3c-8202-fcfd97bc57aa","Type":"ContainerStarted","Data":"2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc"} Dec 05 06:10:24 crc kubenswrapper[4742]: I1205 06:10:24.509336 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-tgnp6" event={"ID":"504b6b10-062b-4d3c-8202-fcfd97bc57aa","Type":"ContainerStarted","Data":"56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8"} Dec 05 06:10:24 crc kubenswrapper[4742]: I1205 06:10:24.536016 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=24.923754504 podStartE2EDuration="32.535987125s" podCreationTimestamp="2025-12-05 06:09:52 +0000 UTC" firstStartedPulling="2025-12-05 06:10:15.702453569 +0000 UTC m=+1091.614588631" lastFinishedPulling="2025-12-05 06:10:23.31468619 +0000 UTC m=+1099.226821252" observedRunningTime="2025-12-05 06:10:24.524670583 +0000 UTC m=+1100.436805685" watchObservedRunningTime="2025-12-05 06:10:24.535987125 +0000 UTC m=+1100.448122247" Dec 05 06:10:24 crc kubenswrapper[4742]: I1205 06:10:24.549691 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-tgnp6" podStartSLOduration=31.429307194 podStartE2EDuration="34.549671298s" podCreationTimestamp="2025-12-05 06:09:50 +0000 UTC" firstStartedPulling="2025-12-05 06:10:16.793969133 +0000 UTC m=+1092.706104195" lastFinishedPulling="2025-12-05 06:10:19.914333217 +0000 UTC 
m=+1095.826468299" observedRunningTime="2025-12-05 06:10:24.545713956 +0000 UTC m=+1100.457849058" watchObservedRunningTime="2025-12-05 06:10:24.549671298 +0000 UTC m=+1100.461806370" Dec 05 06:10:25 crc kubenswrapper[4742]: I1205 06:10:25.442738 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:10:25 crc kubenswrapper[4742]: I1205 06:10:25.442791 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:10:25 crc kubenswrapper[4742]: I1205 06:10:25.563250 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 05 06:10:25 crc kubenswrapper[4742]: I1205 06:10:25.835569 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-89nxv"] Dec 05 06:10:25 crc kubenswrapper[4742]: I1205 06:10:25.895725 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-whtm9"] Dec 05 06:10:25 crc kubenswrapper[4742]: I1205 06:10:25.897320 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:25 crc kubenswrapper[4742]: I1205 06:10:25.900572 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 05 06:10:25 crc kubenswrapper[4742]: I1205 06:10:25.916632 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-cvdpt"] Dec 05 06:10:25 crc kubenswrapper[4742]: I1205 06:10:25.918157 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:25 crc kubenswrapper[4742]: I1205 06:10:25.922368 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 05 06:10:25 crc kubenswrapper[4742]: I1205 06:10:25.942343 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-whtm9"] Dec 05 06:10:25 crc kubenswrapper[4742]: I1205 06:10:25.980235 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-cvdpt"] Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.002316 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-cvdpt\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.002356 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h64qz\" (UniqueName: \"kubernetes.io/projected/ebded868-aaf1-4294-bec1-ec504cdf1810-kube-api-access-h64qz\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.002391 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-config\") pod \"dnsmasq-dns-6bc7876d45-cvdpt\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.002454 4742 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/ebded868-aaf1-4294-bec1-ec504cdf1810-ovn-rundir\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.002473 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebded868-aaf1-4294-bec1-ec504cdf1810-config\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.002496 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-cvdpt\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.002515 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebded868-aaf1-4294-bec1-ec504cdf1810-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.002542 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/ebded868-aaf1-4294-bec1-ec504cdf1810-ovs-rundir\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.002573 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebded868-aaf1-4294-bec1-ec504cdf1810-combined-ca-bundle\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.002598 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kxkm\" (UniqueName: \"kubernetes.io/projected/336f4b9b-f373-49dc-879e-498fc791863d-kube-api-access-4kxkm\") pod \"dnsmasq-dns-6bc7876d45-cvdpt\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.082599 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-2bzx4"] Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.106812 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-cvdpt\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.106856 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h64qz\" (UniqueName: 
\"kubernetes.io/projected/ebded868-aaf1-4294-bec1-ec504cdf1810-kube-api-access-h64qz\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.106890 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-config\") pod \"dnsmasq-dns-6bc7876d45-cvdpt\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.106916 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/ebded868-aaf1-4294-bec1-ec504cdf1810-ovn-rundir\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.106930 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebded868-aaf1-4294-bec1-ec504cdf1810-config\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.106955 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-cvdpt\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.106974 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebded868-aaf1-4294-bec1-ec504cdf1810-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.107004 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/ebded868-aaf1-4294-bec1-ec504cdf1810-ovs-rundir\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.107034 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebded868-aaf1-4294-bec1-ec504cdf1810-combined-ca-bundle\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.107069 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kxkm\" (UniqueName: \"kubernetes.io/projected/336f4b9b-f373-49dc-879e-498fc791863d-kube-api-access-4kxkm\") pod \"dnsmasq-dns-6bc7876d45-cvdpt\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.108040 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-cvdpt\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.108036 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebded868-aaf1-4294-bec1-ec504cdf1810-config\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.108289 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/ebded868-aaf1-4294-bec1-ec504cdf1810-ovn-rundir\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.108292 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/ebded868-aaf1-4294-bec1-ec504cdf1810-ovs-rundir\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.108653 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-config\") pod \"dnsmasq-dns-6bc7876d45-cvdpt\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.109148 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-cvdpt\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.117529 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebded868-aaf1-4294-bec1-ec504cdf1810-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.123673 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebded868-aaf1-4294-bec1-ec504cdf1810-combined-ca-bundle\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.131801 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kxkm\" (UniqueName: \"kubernetes.io/projected/336f4b9b-f373-49dc-879e-498fc791863d-kube-api-access-4kxkm\") pod \"dnsmasq-dns-6bc7876d45-cvdpt\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.131869 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-ccctr"] Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.133383 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.144666 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.152628 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h64qz\" (UniqueName: \"kubernetes.io/projected/ebded868-aaf1-4294-bec1-ec504cdf1810-kube-api-access-h64qz\") pod \"ovn-controller-metrics-whtm9\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.211529 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55f9m\" (UniqueName: \"kubernetes.io/projected/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-kube-api-access-55f9m\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.211572 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-dns-svc\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.211654 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.211683 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.211703 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-config\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.211792 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-ccctr"] Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.248371 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.253486 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.313306 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.313564 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.313586 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-config\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.313826 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55f9m\" (UniqueName: \"kubernetes.io/projected/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-kube-api-access-55f9m\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.313859 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-dns-svc\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.314676 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-dns-svc\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.315085 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.315250 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.315656 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-config\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 
06:10:26.331588 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55f9m\" (UniqueName: \"kubernetes.io/projected/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-kube-api-access-55f9m\") pod \"dnsmasq-dns-8554648995-ccctr\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.362025 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.414782 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-config\") pod \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\" (UID: \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\") " Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.414900 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwln2\" (UniqueName: \"kubernetes.io/projected/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-kube-api-access-xwln2\") pod \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\" (UID: \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\") " Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.414936 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-dns-svc\") pod \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\" (UID: \"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2\") " Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.415646 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2" (UID: "ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.416252 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-config" (OuterVolumeSpecName: "config") pod "ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2" (UID: "ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.419342 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-kube-api-access-xwln2" (OuterVolumeSpecName: "kube-api-access-xwln2") pod "ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2" (UID: "ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2"). InnerVolumeSpecName "kube-api-access-xwln2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.481547 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.516959 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.517004 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwln2\" (UniqueName: \"kubernetes.io/projected/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-kube-api-access-xwln2\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.517013 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.554986 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"85632fad-1ab6-495e-9049-6b5dad9cc955","Type":"ContainerStarted","Data":"b277c72b70d74360bc1397123819d6812ef188799046cfdc94d0b900a6266650"} Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.567797 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"ec392288-7e80-4956-836c-d400d4460ebc","Type":"ContainerStarted","Data":"b91b925cb775f3c104773cf1cf7bcfec00234be6262a07996845dc0aa637e20b"} Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.568472 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.569963 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.571007 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-89nxv" event={"ID":"ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2","Type":"ContainerDied","Data":"f2ae7112a101490a8573a89e68a26b07deaf551064bd1d3cdc4ff6824f104423"} Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.596848 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.826576794 podStartE2EDuration="42.59683215s" podCreationTimestamp="2025-12-05 06:09:44 +0000 UTC" firstStartedPulling="2025-12-05 06:09:45.585050438 +0000 UTC m=+1061.497185490" lastFinishedPulling="2025-12-05 06:10:25.355305784 +0000 UTC m=+1101.267440846" observedRunningTime="2025-12-05 06:10:26.596648686 +0000 UTC m=+1102.508783758" watchObservedRunningTime="2025-12-05 06:10:26.59683215 +0000 UTC m=+1102.508967212" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.640595 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-2bzx4" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.658887 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.695095 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-89nxv"] Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.697688 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-89nxv"] Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.710594 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-cvdpt"] Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.723406 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.768504 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-whtm9"] Dec 05 06:10:26 crc kubenswrapper[4742]: W1205 06:10:26.783087 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podebded868_aaf1_4294_bec1_ec504cdf1810.slice/crio-c63607bf86ec29f4baa0f1d8167800eb0410440e83bfca824ac57e48ea4ae6b2 WatchSource:0}: Error finding container c63607bf86ec29f4baa0f1d8167800eb0410440e83bfca824ac57e48ea4ae6b2: Status 404 returned error can't find the container with id c63607bf86ec29f4baa0f1d8167800eb0410440e83bfca824ac57e48ea4ae6b2 Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.795879 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.822850 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7df0e37d-ee7e-4e3f-a797-703bc6b39545-dns-svc\") pod \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\" (UID: \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\") " Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.822951 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7df0e37d-ee7e-4e3f-a797-703bc6b39545-config\") pod \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\" (UID: \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\") " Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.823036 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cpc56\" (UniqueName: \"kubernetes.io/projected/7df0e37d-ee7e-4e3f-a797-703bc6b39545-kube-api-access-cpc56\") pod \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\" (UID: \"7df0e37d-ee7e-4e3f-a797-703bc6b39545\") " Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.823324 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7df0e37d-ee7e-4e3f-a797-703bc6b39545-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7df0e37d-ee7e-4e3f-a797-703bc6b39545" (UID: "7df0e37d-ee7e-4e3f-a797-703bc6b39545"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.823330 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7df0e37d-ee7e-4e3f-a797-703bc6b39545-config" (OuterVolumeSpecName: "config") pod "7df0e37d-ee7e-4e3f-a797-703bc6b39545" (UID: "7df0e37d-ee7e-4e3f-a797-703bc6b39545"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.823448 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7df0e37d-ee7e-4e3f-a797-703bc6b39545-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.823470 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7df0e37d-ee7e-4e3f-a797-703bc6b39545-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.825992 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7df0e37d-ee7e-4e3f-a797-703bc6b39545-kube-api-access-cpc56" (OuterVolumeSpecName: "kube-api-access-cpc56") pod "7df0e37d-ee7e-4e3f-a797-703bc6b39545" (UID: "7df0e37d-ee7e-4e3f-a797-703bc6b39545"). InnerVolumeSpecName "kube-api-access-cpc56". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:26 crc kubenswrapper[4742]: I1205 06:10:26.924802 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cpc56\" (UniqueName: \"kubernetes.io/projected/7df0e37d-ee7e-4e3f-a797-703bc6b39545-kube-api-access-cpc56\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.011973 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-ccctr"] Dec 05 06:10:27 crc kubenswrapper[4742]: W1205 06:10:27.015094 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9048f3f_eef4_4fa6_933c_93c7ee484ae9.slice/crio-680f02453e0e28d776b3a83346414cd65e5b29e43ee403f425be298dd883c786 WatchSource:0}: Error finding container 680f02453e0e28d776b3a83346414cd65e5b29e43ee403f425be298dd883c786: Status 404 returned error can't find the container with id 680f02453e0e28d776b3a83346414cd65e5b29e43ee403f425be298dd883c786 Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.579576 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" event={"ID":"336f4b9b-f373-49dc-879e-498fc791863d","Type":"ContainerStarted","Data":"bbdb1a9f0f45dc91ba979863d7191b6a32b4c3e1e80a82530e280f467733039b"} Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.580957 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-ccctr" event={"ID":"b9048f3f-eef4-4fa6-933c-93c7ee484ae9","Type":"ContainerStarted","Data":"680f02453e0e28d776b3a83346414cd65e5b29e43ee403f425be298dd883c786"} Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.583827 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-whtm9" event={"ID":"ebded868-aaf1-4294-bec1-ec504cdf1810","Type":"ContainerStarted","Data":"a1893c421b54a7a4f57b1e37935b532c024cdde49cad64c136f9c853dae146fa"} Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.583865 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-whtm9" 
event={"ID":"ebded868-aaf1-4294-bec1-ec504cdf1810","Type":"ContainerStarted","Data":"c63607bf86ec29f4baa0f1d8167800eb0410440e83bfca824ac57e48ea4ae6b2"} Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.587536 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-2bzx4" Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.587857 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-2bzx4" event={"ID":"7df0e37d-ee7e-4e3f-a797-703bc6b39545","Type":"ContainerDied","Data":"b57b540de49dc6096b9202199340d4f4f5f2d89f4b6d1d74a0c469e188bd395d"} Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.587893 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.639958 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.671805 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-whtm9" podStartSLOduration=2.6717879289999997 podStartE2EDuration="2.671787929s" podCreationTimestamp="2025-12-05 06:10:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:10:27.60055385 +0000 UTC m=+1103.512688922" watchObservedRunningTime="2025-12-05 06:10:27.671787929 +0000 UTC m=+1103.583922991" Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.700703 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-2bzx4"] Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.712020 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-2bzx4"] Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.981831 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.983798 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.987214 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.987494 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.988301 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 05 06:10:27 crc kubenswrapper[4742]: I1205 06:10:27.989353 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-f2j52" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.011723 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.047217 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e7d76df0-4f21-4729-9729-1f2ff54a8332-scripts\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.047260 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7d76df0-4f21-4729-9729-1f2ff54a8332-config\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.047444 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.047505 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e7d76df0-4f21-4729-9729-1f2ff54a8332-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.047592 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2smw\" (UniqueName: \"kubernetes.io/projected/e7d76df0-4f21-4729-9729-1f2ff54a8332-kube-api-access-q2smw\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.047694 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.047812 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: 
I1205 06:10:28.149145 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.149259 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e7d76df0-4f21-4729-9729-1f2ff54a8332-scripts\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.149292 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7d76df0-4f21-4729-9729-1f2ff54a8332-config\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.149341 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.149361 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e7d76df0-4f21-4729-9729-1f2ff54a8332-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.149389 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2smw\" (UniqueName: \"kubernetes.io/projected/e7d76df0-4f21-4729-9729-1f2ff54a8332-kube-api-access-q2smw\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.149434 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.150480 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e7d76df0-4f21-4729-9729-1f2ff54a8332-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.150544 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e7d76df0-4f21-4729-9729-1f2ff54a8332-scripts\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.151367 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7d76df0-4f21-4729-9729-1f2ff54a8332-config\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.154674 4742 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.154757 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.155304 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.174045 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2smw\" (UniqueName: \"kubernetes.io/projected/e7d76df0-4f21-4729-9729-1f2ff54a8332-kube-api-access-q2smw\") pod \"ovn-northd-0\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") " pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.307029 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.404766 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7df0e37d-ee7e-4e3f-a797-703bc6b39545" path="/var/lib/kubelet/pods/7df0e37d-ee7e-4e3f-a797-703bc6b39545/volumes" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.405269 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2" path="/var/lib/kubelet/pods/ba3490c6-4f5c-43fc-bb5a-f60ab3dc12c2/volumes" Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.598507 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c4ba4170-0240-42d9-85f4-cf3587f39f02","Type":"ContainerStarted","Data":"8e50439d78583b7a689e999179b4561634fceaa140273853e0ddc31e44263528"} Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.600040 4742 generic.go:334] "Generic (PLEG): container finished" podID="336f4b9b-f373-49dc-879e-498fc791863d" containerID="ea55929b4073a7bd62bf5b3bee99562904e311e919d539667dc6dabb8b274fe3" exitCode=0 Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.600086 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" event={"ID":"336f4b9b-f373-49dc-879e-498fc791863d","Type":"ContainerDied","Data":"ea55929b4073a7bd62bf5b3bee99562904e311e919d539667dc6dabb8b274fe3"} Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.605413 4742 generic.go:334] "Generic (PLEG): container finished" podID="b9048f3f-eef4-4fa6-933c-93c7ee484ae9" containerID="75a911ef5516c55fd4f62824d78ef9c59b29ec073a189aa7cb7c9c220fe8a982" exitCode=0 Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.605490 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-ccctr" event={"ID":"b9048f3f-eef4-4fa6-933c-93c7ee484ae9","Type":"ContainerDied","Data":"75a911ef5516c55fd4f62824d78ef9c59b29ec073a189aa7cb7c9c220fe8a982"} Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.612135 4742 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e","Type":"ContainerStarted","Data":"e37747c80db44fb441e66ad6045bb07df3a0d78b12b382201b8a54d8b6957d0b"} Dec 05 06:10:28 crc kubenswrapper[4742]: I1205 06:10:28.788128 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 06:10:29 crc kubenswrapper[4742]: I1205 06:10:29.623623 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" event={"ID":"336f4b9b-f373-49dc-879e-498fc791863d","Type":"ContainerStarted","Data":"86ac2b863b318cfd483f302f78f0744ce1f4469f05d53c58b0d8d6507cab9615"} Dec 05 06:10:29 crc kubenswrapper[4742]: I1205 06:10:29.624471 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:29 crc kubenswrapper[4742]: I1205 06:10:29.625445 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e7d76df0-4f21-4729-9729-1f2ff54a8332","Type":"ContainerStarted","Data":"7ea261b8b9f58f9a8bd26bbf4a01a3d6327f6c7c4ca52ef53507fcb426ec512a"} Dec 05 06:10:29 crc kubenswrapper[4742]: I1205 06:10:29.629313 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-ccctr" event={"ID":"b9048f3f-eef4-4fa6-933c-93c7ee484ae9","Type":"ContainerStarted","Data":"7e06a33d5567943615a3daf51ff89aefcabdb6c028afac99704b73e3f7ce1ba1"} Dec 05 06:10:29 crc kubenswrapper[4742]: I1205 06:10:29.660529 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-ccctr" podStartSLOduration=3.168485238 podStartE2EDuration="3.660514109s" podCreationTimestamp="2025-12-05 06:10:26 +0000 UTC" firstStartedPulling="2025-12-05 06:10:27.017571591 +0000 UTC m=+1102.929706653" lastFinishedPulling="2025-12-05 06:10:27.509600452 +0000 UTC m=+1103.421735524" observedRunningTime="2025-12-05 06:10:29.658821574 +0000 UTC m=+1105.570956646" watchObservedRunningTime="2025-12-05 06:10:29.660514109 +0000 UTC m=+1105.572649171" Dec 05 06:10:29 crc kubenswrapper[4742]: I1205 06:10:29.661761 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" podStartSLOduration=4.10925365 podStartE2EDuration="4.661754852s" podCreationTimestamp="2025-12-05 06:10:25 +0000 UTC" firstStartedPulling="2025-12-05 06:10:26.71460617 +0000 UTC m=+1102.626741232" lastFinishedPulling="2025-12-05 06:10:27.267107372 +0000 UTC m=+1103.179242434" observedRunningTime="2025-12-05 06:10:29.641153495 +0000 UTC m=+1105.553288567" watchObservedRunningTime="2025-12-05 06:10:29.661754852 +0000 UTC m=+1105.573889904" Dec 05 06:10:30 crc kubenswrapper[4742]: I1205 06:10:30.078497 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 05 06:10:30 crc kubenswrapper[4742]: I1205 06:10:30.640932 4742 generic.go:334] "Generic (PLEG): container finished" podID="85632fad-1ab6-495e-9049-6b5dad9cc955" containerID="b277c72b70d74360bc1397123819d6812ef188799046cfdc94d0b900a6266650" exitCode=0 Dec 05 06:10:30 crc kubenswrapper[4742]: I1205 06:10:30.641024 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"85632fad-1ab6-495e-9049-6b5dad9cc955","Type":"ContainerDied","Data":"b277c72b70d74360bc1397123819d6812ef188799046cfdc94d0b900a6266650"} Dec 05 06:10:30 crc kubenswrapper[4742]: I1205 06:10:30.643908 4742 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7b5d8165-e06e-4600-9cab-9cf84c010725","Type":"ContainerStarted","Data":"2a73506fa683772c445e145b7336056dc8c87df69830067f0bc2e973540b7546"} Dec 05 06:10:30 crc kubenswrapper[4742]: I1205 06:10:30.644552 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:31 crc kubenswrapper[4742]: I1205 06:10:31.653938 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"85632fad-1ab6-495e-9049-6b5dad9cc955","Type":"ContainerStarted","Data":"43f59b0dd0673acbf1e8b1cb19d732574d31c64af500c88a90c6c1409d92d526"} Dec 05 06:10:31 crc kubenswrapper[4742]: I1205 06:10:31.682741 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.9521439879999996 podStartE2EDuration="48.682714422s" podCreationTimestamp="2025-12-05 06:09:43 +0000 UTC" firstStartedPulling="2025-12-05 06:09:45.073155825 +0000 UTC m=+1060.985290887" lastFinishedPulling="2025-12-05 06:10:25.803726259 +0000 UTC m=+1101.715861321" observedRunningTime="2025-12-05 06:10:31.679315902 +0000 UTC m=+1107.591450994" watchObservedRunningTime="2025-12-05 06:10:31.682714422 +0000 UTC m=+1107.594849524" Dec 05 06:10:32 crc kubenswrapper[4742]: I1205 06:10:32.664376 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e7d76df0-4f21-4729-9729-1f2ff54a8332","Type":"ContainerStarted","Data":"d1934753cd07a71a87a5b0d5f8a6aecf1b11e12621dc578d17d0fc95dbb8f143"} Dec 05 06:10:32 crc kubenswrapper[4742]: I1205 06:10:32.664752 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 05 06:10:32 crc kubenswrapper[4742]: I1205 06:10:32.664762 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e7d76df0-4f21-4729-9729-1f2ff54a8332","Type":"ContainerStarted","Data":"caa9cfd6937fda940888bb64cbccaa6adf27580ea4e177e3d3adf4b5e4e8b93d"} Dec 05 06:10:32 crc kubenswrapper[4742]: I1205 06:10:32.684635 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.383363646 podStartE2EDuration="5.684611695s" podCreationTimestamp="2025-12-05 06:10:27 +0000 UTC" firstStartedPulling="2025-12-05 06:10:28.79918222 +0000 UTC m=+1104.711317272" lastFinishedPulling="2025-12-05 06:10:32.100430229 +0000 UTC m=+1108.012565321" observedRunningTime="2025-12-05 06:10:32.684011149 +0000 UTC m=+1108.596146231" watchObservedRunningTime="2025-12-05 06:10:32.684611695 +0000 UTC m=+1108.596746787" Dec 05 06:10:33 crc kubenswrapper[4742]: I1205 06:10:33.680166 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c4ba4170-0240-42d9-85f4-cf3587f39f02","Type":"ContainerDied","Data":"8e50439d78583b7a689e999179b4561634fceaa140273853e0ddc31e44263528"} Dec 05 06:10:33 crc kubenswrapper[4742]: I1205 06:10:33.680165 4742 generic.go:334] "Generic (PLEG): container finished" podID="c4ba4170-0240-42d9-85f4-cf3587f39f02" containerID="8e50439d78583b7a689e999179b4561634fceaa140273853e0ddc31e44263528" exitCode=0 Dec 05 06:10:34 crc kubenswrapper[4742]: I1205 06:10:34.624566 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 05 06:10:34 crc kubenswrapper[4742]: I1205 06:10:34.624942 4742 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 05 06:10:34 crc kubenswrapper[4742]: I1205 06:10:34.692420 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c4ba4170-0240-42d9-85f4-cf3587f39f02","Type":"ContainerStarted","Data":"d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838"} Dec 05 06:10:34 crc kubenswrapper[4742]: I1205 06:10:34.725580 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371983.129223 podStartE2EDuration="53.725551994s" podCreationTimestamp="2025-12-05 06:09:41 +0000 UTC" firstStartedPulling="2025-12-05 06:09:43.911562541 +0000 UTC m=+1059.823697603" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:10:34.716411032 +0000 UTC m=+1110.628546104" watchObservedRunningTime="2025-12-05 06:10:34.725551994 +0000 UTC m=+1110.637687066" Dec 05 06:10:36 crc kubenswrapper[4742]: I1205 06:10:36.256585 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:36 crc kubenswrapper[4742]: I1205 06:10:36.483155 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:36 crc kubenswrapper[4742]: I1205 06:10:36.582471 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-cvdpt"] Dec 05 06:10:36 crc kubenswrapper[4742]: I1205 06:10:36.705835 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" podUID="336f4b9b-f373-49dc-879e-498fc791863d" containerName="dnsmasq-dns" containerID="cri-o://86ac2b863b318cfd483f302f78f0744ce1f4469f05d53c58b0d8d6507cab9615" gracePeriod=10 Dec 05 06:10:36 crc kubenswrapper[4742]: I1205 06:10:36.823384 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6z8h9"] Dec 05 06:10:36 crc kubenswrapper[4742]: I1205 06:10:36.826223 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:36 crc kubenswrapper[4742]: I1205 06:10:36.855991 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6z8h9"] Dec 05 06:10:36 crc kubenswrapper[4742]: I1205 06:10:36.914734 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 05 06:10:36 crc kubenswrapper[4742]: I1205 06:10:36.963752 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:36 crc kubenswrapper[4742]: I1205 06:10:36.963799 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:36 crc kubenswrapper[4742]: I1205 06:10:36.963820 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-config\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:36 crc kubenswrapper[4742]: I1205 06:10:36.963867 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:36 crc kubenswrapper[4742]: I1205 06:10:36.963893 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbknq\" (UniqueName: \"kubernetes.io/projected/66961e66-3235-4bc8-995e-106a483d8724-kube-api-access-wbknq\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.025640 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.065575 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.065627 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbknq\" (UniqueName: \"kubernetes.io/projected/66961e66-3235-4bc8-995e-106a483d8724-kube-api-access-wbknq\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.065705 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.065747 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.065771 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-config\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.066950 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.067133 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.068328 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.068800 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-config\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.085896 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbknq\" (UniqueName: \"kubernetes.io/projected/66961e66-3235-4bc8-995e-106a483d8724-kube-api-access-wbknq\") pod \"dnsmasq-dns-b8fbc5445-6z8h9\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.183213 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.267569 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.370576 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-config\") pod \"336f4b9b-f373-49dc-879e-498fc791863d\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.370651 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-ovsdbserver-sb\") pod \"336f4b9b-f373-49dc-879e-498fc791863d\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.370752 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-dns-svc\") pod \"336f4b9b-f373-49dc-879e-498fc791863d\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.370804 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kxkm\" (UniqueName: \"kubernetes.io/projected/336f4b9b-f373-49dc-879e-498fc791863d-kube-api-access-4kxkm\") pod \"336f4b9b-f373-49dc-879e-498fc791863d\" (UID: \"336f4b9b-f373-49dc-879e-498fc791863d\") " Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.387868 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/336f4b9b-f373-49dc-879e-498fc791863d-kube-api-access-4kxkm" (OuterVolumeSpecName: "kube-api-access-4kxkm") pod "336f4b9b-f373-49dc-879e-498fc791863d" (UID: "336f4b9b-f373-49dc-879e-498fc791863d"). InnerVolumeSpecName "kube-api-access-4kxkm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.416978 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "336f4b9b-f373-49dc-879e-498fc791863d" (UID: "336f4b9b-f373-49dc-879e-498fc791863d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.420586 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-config" (OuterVolumeSpecName: "config") pod "336f4b9b-f373-49dc-879e-498fc791863d" (UID: "336f4b9b-f373-49dc-879e-498fc791863d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.421358 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "336f4b9b-f373-49dc-879e-498fc791863d" (UID: "336f4b9b-f373-49dc-879e-498fc791863d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.473609 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.473642 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kxkm\" (UniqueName: \"kubernetes.io/projected/336f4b9b-f373-49dc-879e-498fc791863d-kube-api-access-4kxkm\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.473652 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.473662 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/336f4b9b-f373-49dc-879e-498fc791863d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.668885 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6z8h9"] Dec 05 06:10:37 crc kubenswrapper[4742]: W1205 06:10:37.672843 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66961e66_3235_4bc8_995e_106a483d8724.slice/crio-05b3e3e640582648744ee35dc205a45997888864001f778e28ab7965fac1c631 WatchSource:0}: Error finding container 05b3e3e640582648744ee35dc205a45997888864001f778e28ab7965fac1c631: Status 404 returned error can't find the container with id 05b3e3e640582648744ee35dc205a45997888864001f778e28ab7965fac1c631 Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.714516 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" event={"ID":"66961e66-3235-4bc8-995e-106a483d8724","Type":"ContainerStarted","Data":"05b3e3e640582648744ee35dc205a45997888864001f778e28ab7965fac1c631"} Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.716639 4742 generic.go:334] "Generic (PLEG): container finished" podID="336f4b9b-f373-49dc-879e-498fc791863d" containerID="86ac2b863b318cfd483f302f78f0744ce1f4469f05d53c58b0d8d6507cab9615" exitCode=0 Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.716700 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" event={"ID":"336f4b9b-f373-49dc-879e-498fc791863d","Type":"ContainerDied","Data":"86ac2b863b318cfd483f302f78f0744ce1f4469f05d53c58b0d8d6507cab9615"} Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.716775 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" event={"ID":"336f4b9b-f373-49dc-879e-498fc791863d","Type":"ContainerDied","Data":"bbdb1a9f0f45dc91ba979863d7191b6a32b4c3e1e80a82530e280f467733039b"} Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.716806 4742 scope.go:117] "RemoveContainer" containerID="86ac2b863b318cfd483f302f78f0744ce1f4469f05d53c58b0d8d6507cab9615" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.716726 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-cvdpt" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.751039 4742 scope.go:117] "RemoveContainer" containerID="ea55929b4073a7bd62bf5b3bee99562904e311e919d539667dc6dabb8b274fe3" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.759006 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-cvdpt"] Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.763877 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-cvdpt"] Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.794419 4742 scope.go:117] "RemoveContainer" containerID="86ac2b863b318cfd483f302f78f0744ce1f4469f05d53c58b0d8d6507cab9615" Dec 05 06:10:37 crc kubenswrapper[4742]: E1205 06:10:37.795343 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86ac2b863b318cfd483f302f78f0744ce1f4469f05d53c58b0d8d6507cab9615\": container with ID starting with 86ac2b863b318cfd483f302f78f0744ce1f4469f05d53c58b0d8d6507cab9615 not found: ID does not exist" containerID="86ac2b863b318cfd483f302f78f0744ce1f4469f05d53c58b0d8d6507cab9615" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.795402 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86ac2b863b318cfd483f302f78f0744ce1f4469f05d53c58b0d8d6507cab9615"} err="failed to get container status \"86ac2b863b318cfd483f302f78f0744ce1f4469f05d53c58b0d8d6507cab9615\": rpc error: code = NotFound desc = could not find container \"86ac2b863b318cfd483f302f78f0744ce1f4469f05d53c58b0d8d6507cab9615\": container with ID starting with 86ac2b863b318cfd483f302f78f0744ce1f4469f05d53c58b0d8d6507cab9615 not found: ID does not exist" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.795436 4742 scope.go:117] "RemoveContainer" containerID="ea55929b4073a7bd62bf5b3bee99562904e311e919d539667dc6dabb8b274fe3" Dec 05 06:10:37 crc kubenswrapper[4742]: E1205 06:10:37.795880 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea55929b4073a7bd62bf5b3bee99562904e311e919d539667dc6dabb8b274fe3\": container with ID starting with ea55929b4073a7bd62bf5b3bee99562904e311e919d539667dc6dabb8b274fe3 not found: ID does not exist" containerID="ea55929b4073a7bd62bf5b3bee99562904e311e919d539667dc6dabb8b274fe3" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.795920 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea55929b4073a7bd62bf5b3bee99562904e311e919d539667dc6dabb8b274fe3"} err="failed to get container status \"ea55929b4073a7bd62bf5b3bee99562904e311e919d539667dc6dabb8b274fe3\": rpc error: code = NotFound desc = could not find container \"ea55929b4073a7bd62bf5b3bee99562904e311e919d539667dc6dabb8b274fe3\": container with ID starting with ea55929b4073a7bd62bf5b3bee99562904e311e919d539667dc6dabb8b274fe3 not found: ID does not exist" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.929889 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 05 06:10:37 crc kubenswrapper[4742]: E1205 06:10:37.930371 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="336f4b9b-f373-49dc-879e-498fc791863d" containerName="init" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.930398 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="336f4b9b-f373-49dc-879e-498fc791863d" 
containerName="init" Dec 05 06:10:37 crc kubenswrapper[4742]: E1205 06:10:37.930414 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="336f4b9b-f373-49dc-879e-498fc791863d" containerName="dnsmasq-dns" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.930422 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="336f4b9b-f373-49dc-879e-498fc791863d" containerName="dnsmasq-dns" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.930616 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="336f4b9b-f373-49dc-879e-498fc791863d" containerName="dnsmasq-dns" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.936198 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.940280 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.940462 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.940676 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-qpnh9" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.943021 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 05 06:10:37 crc kubenswrapper[4742]: I1205 06:10:37.961119 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.083358 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-cache\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.083427 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.083490 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.083520 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vspzr\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-kube-api-access-vspzr\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.083568 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-lock\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.184902 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-cache\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.185000 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.185071 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.185109 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vspzr\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-kube-api-access-vspzr\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.185162 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-lock\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: E1205 06:10:38.185213 4742 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 06:10:38 crc kubenswrapper[4742]: E1205 06:10:38.185235 4742 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 06:10:38 crc kubenswrapper[4742]: E1205 06:10:38.185301 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift podName:f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b nodeName:}" failed. No retries permitted until 2025-12-05 06:10:38.685280112 +0000 UTC m=+1114.597415174 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift") pod "swift-storage-0" (UID: "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b") : configmap "swift-ring-files" not found Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.185503 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-cache\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.185582 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-lock\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.185712 4742 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.228363 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vspzr\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-kube-api-access-vspzr\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.233145 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.394501 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="336f4b9b-f373-49dc-879e-498fc791863d" path="/var/lib/kubelet/pods/336f4b9b-f373-49dc-879e-498fc791863d/volumes" Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.693541 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:38 crc kubenswrapper[4742]: E1205 06:10:38.693774 4742 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 06:10:38 crc kubenswrapper[4742]: E1205 06:10:38.693790 4742 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 06:10:38 crc kubenswrapper[4742]: E1205 06:10:38.693860 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift podName:f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b nodeName:}" failed. No retries permitted until 2025-12-05 06:10:39.69384252 +0000 UTC m=+1115.605977582 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift") pod "swift-storage-0" (UID: "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b") : configmap "swift-ring-files" not found Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.726287 4742 generic.go:334] "Generic (PLEG): container finished" podID="66961e66-3235-4bc8-995e-106a483d8724" containerID="25de760e9e0c88fbed4fd722bb636d50bce265dc8d3f1398594af46aeb0acfc7" exitCode=0 Dec 05 06:10:38 crc kubenswrapper[4742]: I1205 06:10:38.726352 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" event={"ID":"66961e66-3235-4bc8-995e-106a483d8724","Type":"ContainerDied","Data":"25de760e9e0c88fbed4fd722bb636d50bce265dc8d3f1398594af46aeb0acfc7"} Dec 05 06:10:39 crc kubenswrapper[4742]: I1205 06:10:39.708970 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:39 crc kubenswrapper[4742]: E1205 06:10:39.709230 4742 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 06:10:39 crc kubenswrapper[4742]: E1205 06:10:39.709459 4742 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 06:10:39 crc kubenswrapper[4742]: E1205 06:10:39.709533 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift podName:f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b nodeName:}" failed. No retries permitted until 2025-12-05 06:10:41.709510788 +0000 UTC m=+1117.621645870 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift") pod "swift-storage-0" (UID: "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b") : configmap "swift-ring-files" not found Dec 05 06:10:39 crc kubenswrapper[4742]: I1205 06:10:39.734497 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" event={"ID":"66961e66-3235-4bc8-995e-106a483d8724","Type":"ContainerStarted","Data":"97fbad03797344f8e1cbf777f0992d8832647f1c01b87eb27839951ba561dd71"} Dec 05 06:10:39 crc kubenswrapper[4742]: I1205 06:10:39.734647 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:39 crc kubenswrapper[4742]: I1205 06:10:39.760268 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" podStartSLOduration=3.760252695 podStartE2EDuration="3.760252695s" podCreationTimestamp="2025-12-05 06:10:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:10:39.75666923 +0000 UTC m=+1115.668804292" watchObservedRunningTime="2025-12-05 06:10:39.760252695 +0000 UTC m=+1115.672387757" Dec 05 06:10:41 crc kubenswrapper[4742]: I1205 06:10:41.752675 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:41 crc kubenswrapper[4742]: E1205 06:10:41.754221 4742 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 06:10:41 crc kubenswrapper[4742]: E1205 06:10:41.754269 4742 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 06:10:41 crc kubenswrapper[4742]: E1205 06:10:41.754373 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift podName:f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b nodeName:}" failed. No retries permitted until 2025-12-05 06:10:45.754341281 +0000 UTC m=+1121.666476383 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift") pod "swift-storage-0" (UID: "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b") : configmap "swift-ring-files" not found Dec 05 06:10:41 crc kubenswrapper[4742]: I1205 06:10:41.960707 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-mbfrw"] Dec 05 06:10:41 crc kubenswrapper[4742]: I1205 06:10:41.963569 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:41 crc kubenswrapper[4742]: I1205 06:10:41.966433 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 06:10:41 crc kubenswrapper[4742]: I1205 06:10:41.968719 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 05 06:10:41 crc kubenswrapper[4742]: I1205 06:10:41.968947 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 05 06:10:41 crc kubenswrapper[4742]: I1205 06:10:41.974752 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-mbfrw"] Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.026304 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-mbfrw"] Dec 05 06:10:42 crc kubenswrapper[4742]: E1205 06:10:42.027424 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-nfbl7 ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-nfbl7 ring-data-devices scripts swiftconf]: context canceled" pod="openstack/swift-ring-rebalance-mbfrw" podUID="c4a26242-d670-42cf-9b64-1e671ca80f2f" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.036383 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-vcv4d"] Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.037458 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.041852 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-vcv4d"] Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.163871 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-combined-ca-bundle\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.163915 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-swiftconf\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.164020 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-dispersionconf\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.164046 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-combined-ca-bundle\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc 
kubenswrapper[4742]: I1205 06:10:42.164076 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/52759157-a5b0-481a-9128-ee595e269af9-etc-swift\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.164099 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/52759157-a5b0-481a-9128-ee595e269af9-ring-data-devices\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.164187 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-swiftconf\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.164236 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/52759157-a5b0-481a-9128-ee595e269af9-scripts\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.164257 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c4a26242-d670-42cf-9b64-1e671ca80f2f-scripts\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.164275 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx7qz\" (UniqueName: \"kubernetes.io/projected/52759157-a5b0-481a-9128-ee595e269af9-kube-api-access-xx7qz\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.164362 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-dispersionconf\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.164377 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c4a26242-d670-42cf-9b64-1e671ca80f2f-ring-data-devices\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.164398 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c4a26242-d670-42cf-9b64-1e671ca80f2f-etc-swift\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " 
pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.164477 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfbl7\" (UniqueName: \"kubernetes.io/projected/c4a26242-d670-42cf-9b64-1e671ca80f2f-kube-api-access-nfbl7\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.265837 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-dispersionconf\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.265894 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-combined-ca-bundle\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.265920 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/52759157-a5b0-481a-9128-ee595e269af9-etc-swift\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.265944 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/52759157-a5b0-481a-9128-ee595e269af9-ring-data-devices\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.265966 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-swiftconf\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.265990 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/52759157-a5b0-481a-9128-ee595e269af9-scripts\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.266015 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c4a26242-d670-42cf-9b64-1e671ca80f2f-scripts\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.266037 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx7qz\" (UniqueName: \"kubernetes.io/projected/52759157-a5b0-481a-9128-ee595e269af9-kube-api-access-xx7qz\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: 
I1205 06:10:42.266140 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-dispersionconf\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.266174 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c4a26242-d670-42cf-9b64-1e671ca80f2f-ring-data-devices\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.266203 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c4a26242-d670-42cf-9b64-1e671ca80f2f-etc-swift\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.266254 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfbl7\" (UniqueName: \"kubernetes.io/projected/c4a26242-d670-42cf-9b64-1e671ca80f2f-kube-api-access-nfbl7\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.266300 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-combined-ca-bundle\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.266321 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-swiftconf\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.267233 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c4a26242-d670-42cf-9b64-1e671ca80f2f-scripts\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.267385 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c4a26242-d670-42cf-9b64-1e671ca80f2f-ring-data-devices\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.267422 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/52759157-a5b0-481a-9128-ee595e269af9-ring-data-devices\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.267546 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" 
(UniqueName: \"kubernetes.io/configmap/52759157-a5b0-481a-9128-ee595e269af9-scripts\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.267674 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/52759157-a5b0-481a-9128-ee595e269af9-etc-swift\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.267979 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c4a26242-d670-42cf-9b64-1e671ca80f2f-etc-swift\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.271513 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-dispersionconf\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.271850 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-swiftconf\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.274998 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-combined-ca-bundle\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.276686 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-combined-ca-bundle\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.280730 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-dispersionconf\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.287638 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-swiftconf\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.287868 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx7qz\" (UniqueName: \"kubernetes.io/projected/52759157-a5b0-481a-9128-ee595e269af9-kube-api-access-xx7qz\") pod \"swift-ring-rebalance-vcv4d\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " 
pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.296979 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfbl7\" (UniqueName: \"kubernetes.io/projected/c4a26242-d670-42cf-9b64-1e671ca80f2f-kube-api-access-nfbl7\") pod \"swift-ring-rebalance-mbfrw\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.356748 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.761257 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.775978 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.876046 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-swiftconf\") pod \"c4a26242-d670-42cf-9b64-1e671ca80f2f\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.876110 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c4a26242-d670-42cf-9b64-1e671ca80f2f-scripts\") pod \"c4a26242-d670-42cf-9b64-1e671ca80f2f\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.876136 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-combined-ca-bundle\") pod \"c4a26242-d670-42cf-9b64-1e671ca80f2f\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.876204 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfbl7\" (UniqueName: \"kubernetes.io/projected/c4a26242-d670-42cf-9b64-1e671ca80f2f-kube-api-access-nfbl7\") pod \"c4a26242-d670-42cf-9b64-1e671ca80f2f\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.876246 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c4a26242-d670-42cf-9b64-1e671ca80f2f-ring-data-devices\") pod \"c4a26242-d670-42cf-9b64-1e671ca80f2f\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.876340 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-dispersionconf\") pod \"c4a26242-d670-42cf-9b64-1e671ca80f2f\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.876427 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c4a26242-d670-42cf-9b64-1e671ca80f2f-etc-swift\") pod \"c4a26242-d670-42cf-9b64-1e671ca80f2f\" (UID: \"c4a26242-d670-42cf-9b64-1e671ca80f2f\") " Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.877193 4742 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4a26242-d670-42cf-9b64-1e671ca80f2f-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "c4a26242-d670-42cf-9b64-1e671ca80f2f" (UID: "c4a26242-d670-42cf-9b64-1e671ca80f2f"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.878504 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4a26242-d670-42cf-9b64-1e671ca80f2f-scripts" (OuterVolumeSpecName: "scripts") pod "c4a26242-d670-42cf-9b64-1e671ca80f2f" (UID: "c4a26242-d670-42cf-9b64-1e671ca80f2f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.878521 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4a26242-d670-42cf-9b64-1e671ca80f2f-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "c4a26242-d670-42cf-9b64-1e671ca80f2f" (UID: "c4a26242-d670-42cf-9b64-1e671ca80f2f"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.882239 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4a26242-d670-42cf-9b64-1e671ca80f2f-kube-api-access-nfbl7" (OuterVolumeSpecName: "kube-api-access-nfbl7") pod "c4a26242-d670-42cf-9b64-1e671ca80f2f" (UID: "c4a26242-d670-42cf-9b64-1e671ca80f2f"). InnerVolumeSpecName "kube-api-access-nfbl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.882735 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "c4a26242-d670-42cf-9b64-1e671ca80f2f" (UID: "c4a26242-d670-42cf-9b64-1e671ca80f2f"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.887121 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "c4a26242-d670-42cf-9b64-1e671ca80f2f" (UID: "c4a26242-d670-42cf-9b64-1e671ca80f2f"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.892178 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c4a26242-d670-42cf-9b64-1e671ca80f2f" (UID: "c4a26242-d670-42cf-9b64-1e671ca80f2f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.921489 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-vcv4d"] Dec 05 06:10:42 crc kubenswrapper[4742]: W1205 06:10:42.924920 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod52759157_a5b0_481a_9128_ee595e269af9.slice/crio-dbab7e307227c706f2bfaa06abfe46fbc0143e625c49cb9d747748af7d6b0e66 WatchSource:0}: Error finding container dbab7e307227c706f2bfaa06abfe46fbc0143e625c49cb9d747748af7d6b0e66: Status 404 returned error can't find the container with id dbab7e307227c706f2bfaa06abfe46fbc0143e625c49cb9d747748af7d6b0e66 Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.978983 4742 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.979022 4742 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c4a26242-d670-42cf-9b64-1e671ca80f2f-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.979037 4742 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.979051 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c4a26242-d670-42cf-9b64-1e671ca80f2f-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.979106 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4a26242-d670-42cf-9b64-1e671ca80f2f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.979120 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfbl7\" (UniqueName: \"kubernetes.io/projected/c4a26242-d670-42cf-9b64-1e671ca80f2f-kube-api-access-nfbl7\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:42 crc kubenswrapper[4742]: I1205 06:10:42.979134 4742 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c4a26242-d670-42cf-9b64-1e671ca80f2f-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:43 crc kubenswrapper[4742]: I1205 06:10:43.403790 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 05 06:10:43 crc kubenswrapper[4742]: I1205 06:10:43.410456 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 05 06:10:43 crc kubenswrapper[4742]: I1205 06:10:43.411866 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 05 06:10:43 crc kubenswrapper[4742]: I1205 06:10:43.544989 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 05 06:10:43 crc kubenswrapper[4742]: I1205 06:10:43.770453 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-vcv4d" 
event={"ID":"52759157-a5b0-481a-9128-ee595e269af9","Type":"ContainerStarted","Data":"dbab7e307227c706f2bfaa06abfe46fbc0143e625c49cb9d747748af7d6b0e66"} Dec 05 06:10:43 crc kubenswrapper[4742]: I1205 06:10:43.770496 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-mbfrw" Dec 05 06:10:43 crc kubenswrapper[4742]: I1205 06:10:43.844948 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-mbfrw"] Dec 05 06:10:43 crc kubenswrapper[4742]: I1205 06:10:43.854465 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-mbfrw"] Dec 05 06:10:43 crc kubenswrapper[4742]: I1205 06:10:43.890878 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.402861 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4a26242-d670-42cf-9b64-1e671ca80f2f" path="/var/lib/kubelet/pods/c4a26242-d670-42cf-9b64-1e671ca80f2f/volumes" Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.688747 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7678-account-create-update-pv5qw"] Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.689874 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7678-account-create-update-pv5qw" Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.693283 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.696803 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7678-account-create-update-pv5qw"] Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.762443 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-75chc"] Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.763697 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-75chc" Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.785348 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-75chc"] Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.809230 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbgqk\" (UniqueName: \"kubernetes.io/projected/cf9c88e4-e053-4594-bc1f-176035f2bff7-kube-api-access-zbgqk\") pod \"keystone-7678-account-create-update-pv5qw\" (UID: \"cf9c88e4-e053-4594-bc1f-176035f2bff7\") " pod="openstack/keystone-7678-account-create-update-pv5qw" Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.809354 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf9c88e4-e053-4594-bc1f-176035f2bff7-operator-scripts\") pod \"keystone-7678-account-create-update-pv5qw\" (UID: \"cf9c88e4-e053-4594-bc1f-176035f2bff7\") " pod="openstack/keystone-7678-account-create-update-pv5qw" Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.910769 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66ddfb11-066b-41f6-8bd0-7248f3cc36ea-operator-scripts\") pod \"keystone-db-create-75chc\" (UID: \"66ddfb11-066b-41f6-8bd0-7248f3cc36ea\") " pod="openstack/keystone-db-create-75chc" Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.910815 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbgqk\" (UniqueName: \"kubernetes.io/projected/cf9c88e4-e053-4594-bc1f-176035f2bff7-kube-api-access-zbgqk\") pod \"keystone-7678-account-create-update-pv5qw\" (UID: \"cf9c88e4-e053-4594-bc1f-176035f2bff7\") " pod="openstack/keystone-7678-account-create-update-pv5qw" Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.910888 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz55z\" (UniqueName: \"kubernetes.io/projected/66ddfb11-066b-41f6-8bd0-7248f3cc36ea-kube-api-access-vz55z\") pod \"keystone-db-create-75chc\" (UID: \"66ddfb11-066b-41f6-8bd0-7248f3cc36ea\") " pod="openstack/keystone-db-create-75chc" Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.911146 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf9c88e4-e053-4594-bc1f-176035f2bff7-operator-scripts\") pod \"keystone-7678-account-create-update-pv5qw\" (UID: \"cf9c88e4-e053-4594-bc1f-176035f2bff7\") " pod="openstack/keystone-7678-account-create-update-pv5qw" Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.912077 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf9c88e4-e053-4594-bc1f-176035f2bff7-operator-scripts\") pod \"keystone-7678-account-create-update-pv5qw\" (UID: \"cf9c88e4-e053-4594-bc1f-176035f2bff7\") " pod="openstack/keystone-7678-account-create-update-pv5qw" Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.940100 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-mlg7m"] Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.942792 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-mlg7m" Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.950624 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbgqk\" (UniqueName: \"kubernetes.io/projected/cf9c88e4-e053-4594-bc1f-176035f2bff7-kube-api-access-zbgqk\") pod \"keystone-7678-account-create-update-pv5qw\" (UID: \"cf9c88e4-e053-4594-bc1f-176035f2bff7\") " pod="openstack/keystone-7678-account-create-update-pv5qw" Dec 05 06:10:44 crc kubenswrapper[4742]: I1205 06:10:44.964599 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-mlg7m"] Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.012229 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d380e842-910a-443b-aa5e-151fa1fe43ea-operator-scripts\") pod \"placement-db-create-mlg7m\" (UID: \"d380e842-910a-443b-aa5e-151fa1fe43ea\") " pod="openstack/placement-db-create-mlg7m" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.012303 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9kpq\" (UniqueName: \"kubernetes.io/projected/d380e842-910a-443b-aa5e-151fa1fe43ea-kube-api-access-x9kpq\") pod \"placement-db-create-mlg7m\" (UID: \"d380e842-910a-443b-aa5e-151fa1fe43ea\") " pod="openstack/placement-db-create-mlg7m" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.012373 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66ddfb11-066b-41f6-8bd0-7248f3cc36ea-operator-scripts\") pod \"keystone-db-create-75chc\" (UID: \"66ddfb11-066b-41f6-8bd0-7248f3cc36ea\") " pod="openstack/keystone-db-create-75chc" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.012395 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz55z\" (UniqueName: \"kubernetes.io/projected/66ddfb11-066b-41f6-8bd0-7248f3cc36ea-kube-api-access-vz55z\") pod \"keystone-db-create-75chc\" (UID: \"66ddfb11-066b-41f6-8bd0-7248f3cc36ea\") " pod="openstack/keystone-db-create-75chc" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.013397 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66ddfb11-066b-41f6-8bd0-7248f3cc36ea-operator-scripts\") pod \"keystone-db-create-75chc\" (UID: \"66ddfb11-066b-41f6-8bd0-7248f3cc36ea\") " pod="openstack/keystone-db-create-75chc" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.042468 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz55z\" (UniqueName: \"kubernetes.io/projected/66ddfb11-066b-41f6-8bd0-7248f3cc36ea-kube-api-access-vz55z\") pod \"keystone-db-create-75chc\" (UID: \"66ddfb11-066b-41f6-8bd0-7248f3cc36ea\") " pod="openstack/keystone-db-create-75chc" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.049273 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-9800-account-create-update-gpgxj"] Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.050275 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-9800-account-create-update-gpgxj" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.052642 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.066729 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7678-account-create-update-pv5qw" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.081037 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-9800-account-create-update-gpgxj"] Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.084181 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-75chc" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.113463 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67gk6\" (UniqueName: \"kubernetes.io/projected/6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6-kube-api-access-67gk6\") pod \"placement-9800-account-create-update-gpgxj\" (UID: \"6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6\") " pod="openstack/placement-9800-account-create-update-gpgxj" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.113542 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6-operator-scripts\") pod \"placement-9800-account-create-update-gpgxj\" (UID: \"6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6\") " pod="openstack/placement-9800-account-create-update-gpgxj" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.113615 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d380e842-910a-443b-aa5e-151fa1fe43ea-operator-scripts\") pod \"placement-db-create-mlg7m\" (UID: \"d380e842-910a-443b-aa5e-151fa1fe43ea\") " pod="openstack/placement-db-create-mlg7m" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.113665 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9kpq\" (UniqueName: \"kubernetes.io/projected/d380e842-910a-443b-aa5e-151fa1fe43ea-kube-api-access-x9kpq\") pod \"placement-db-create-mlg7m\" (UID: \"d380e842-910a-443b-aa5e-151fa1fe43ea\") " pod="openstack/placement-db-create-mlg7m" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.114774 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d380e842-910a-443b-aa5e-151fa1fe43ea-operator-scripts\") pod \"placement-db-create-mlg7m\" (UID: \"d380e842-910a-443b-aa5e-151fa1fe43ea\") " pod="openstack/placement-db-create-mlg7m" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.128511 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9kpq\" (UniqueName: \"kubernetes.io/projected/d380e842-910a-443b-aa5e-151fa1fe43ea-kube-api-access-x9kpq\") pod \"placement-db-create-mlg7m\" (UID: \"d380e842-910a-443b-aa5e-151fa1fe43ea\") " pod="openstack/placement-db-create-mlg7m" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.215047 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67gk6\" (UniqueName: \"kubernetes.io/projected/6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6-kube-api-access-67gk6\") pod 
\"placement-9800-account-create-update-gpgxj\" (UID: \"6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6\") " pod="openstack/placement-9800-account-create-update-gpgxj" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.215104 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6-operator-scripts\") pod \"placement-9800-account-create-update-gpgxj\" (UID: \"6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6\") " pod="openstack/placement-9800-account-create-update-gpgxj" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.215848 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6-operator-scripts\") pod \"placement-9800-account-create-update-gpgxj\" (UID: \"6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6\") " pod="openstack/placement-9800-account-create-update-gpgxj" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.235005 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67gk6\" (UniqueName: \"kubernetes.io/projected/6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6-kube-api-access-67gk6\") pod \"placement-9800-account-create-update-gpgxj\" (UID: \"6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6\") " pod="openstack/placement-9800-account-create-update-gpgxj" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.284626 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-mlg7m" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.388855 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-9800-account-create-update-gpgxj" Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.542217 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7678-account-create-update-pv5qw"] Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.626707 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-75chc"] Dec 05 06:10:45 crc kubenswrapper[4742]: I1205 06:10:45.824852 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:45 crc kubenswrapper[4742]: E1205 06:10:45.825142 4742 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 06:10:45 crc kubenswrapper[4742]: E1205 06:10:45.825182 4742 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 06:10:45 crc kubenswrapper[4742]: E1205 06:10:45.825253 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift podName:f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b nodeName:}" failed. No retries permitted until 2025-12-05 06:10:53.825231071 +0000 UTC m=+1129.737366143 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift") pod "swift-storage-0" (UID: "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b") : configmap "swift-ring-files" not found Dec 05 06:10:46 crc kubenswrapper[4742]: I1205 06:10:46.671025 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:10:46 crc kubenswrapper[4742]: I1205 06:10:46.671369 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:10:47 crc kubenswrapper[4742]: W1205 06:10:47.177914 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf9c88e4_e053_4594_bc1f_176035f2bff7.slice/crio-a1016c3679492f0ba39a60a9f649e0aefa001e9c1f0269e4d935c11329b8216d WatchSource:0}: Error finding container a1016c3679492f0ba39a60a9f649e0aefa001e9c1f0269e4d935c11329b8216d: Status 404 returned error can't find the container with id a1016c3679492f0ba39a60a9f649e0aefa001e9c1f0269e4d935c11329b8216d Dec 05 06:10:47 crc kubenswrapper[4742]: I1205 06:10:47.188095 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:10:47 crc kubenswrapper[4742]: I1205 06:10:47.307888 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-ccctr"] Dec 05 06:10:47 crc kubenswrapper[4742]: I1205 06:10:47.308171 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-ccctr" podUID="b9048f3f-eef4-4fa6-933c-93c7ee484ae9" containerName="dnsmasq-dns" containerID="cri-o://7e06a33d5567943615a3daf51ff89aefcabdb6c028afac99704b73e3f7ce1ba1" gracePeriod=10 Dec 05 06:10:47 crc kubenswrapper[4742]: I1205 06:10:47.731674 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-mlg7m"] Dec 05 06:10:47 crc kubenswrapper[4742]: W1205 06:10:47.740642 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd380e842_910a_443b_aa5e_151fa1fe43ea.slice/crio-600150eec8f008ae95d274c55cec2e4714d6a32a25e75a67753e67bedb008e68 WatchSource:0}: Error finding container 600150eec8f008ae95d274c55cec2e4714d6a32a25e75a67753e67bedb008e68: Status 404 returned error can't find the container with id 600150eec8f008ae95d274c55cec2e4714d6a32a25e75a67753e67bedb008e68 Dec 05 06:10:47 crc kubenswrapper[4742]: I1205 06:10:47.812690 4742 generic.go:334] "Generic (PLEG): container finished" podID="b9048f3f-eef4-4fa6-933c-93c7ee484ae9" containerID="7e06a33d5567943615a3daf51ff89aefcabdb6c028afac99704b73e3f7ce1ba1" exitCode=0 Dec 05 06:10:47 crc kubenswrapper[4742]: I1205 06:10:47.812760 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-ccctr" event={"ID":"b9048f3f-eef4-4fa6-933c-93c7ee484ae9","Type":"ContainerDied","Data":"7e06a33d5567943615a3daf51ff89aefcabdb6c028afac99704b73e3f7ce1ba1"} Dec 05 06:10:47 crc kubenswrapper[4742]: 
I1205 06:10:47.817834 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-mlg7m" event={"ID":"d380e842-910a-443b-aa5e-151fa1fe43ea","Type":"ContainerStarted","Data":"600150eec8f008ae95d274c55cec2e4714d6a32a25e75a67753e67bedb008e68"} Dec 05 06:10:47 crc kubenswrapper[4742]: I1205 06:10:47.822123 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-75chc" event={"ID":"66ddfb11-066b-41f6-8bd0-7248f3cc36ea","Type":"ContainerStarted","Data":"a5c3a86a9f7073de7183f7f4b8179af08280e0785e85aa1a59af7258b22a18e1"} Dec 05 06:10:47 crc kubenswrapper[4742]: I1205 06:10:47.825166 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7678-account-create-update-pv5qw" event={"ID":"cf9c88e4-e053-4594-bc1f-176035f2bff7","Type":"ContainerStarted","Data":"a1016c3679492f0ba39a60a9f649e0aefa001e9c1f0269e4d935c11329b8216d"} Dec 05 06:10:47 crc kubenswrapper[4742]: I1205 06:10:47.828505 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-9800-account-create-update-gpgxj"] Dec 05 06:10:47 crc kubenswrapper[4742]: W1205 06:10:47.832379 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6bfd6781_fa3f_4cc4_9ba9_9dd169d44af6.slice/crio-436cae8bb715069a8d59dd33f0d6b12b738e9f2f683b338cd6ad565d6dc5b593 WatchSource:0}: Error finding container 436cae8bb715069a8d59dd33f0d6b12b738e9f2f683b338cd6ad565d6dc5b593: Status 404 returned error can't find the container with id 436cae8bb715069a8d59dd33f0d6b12b738e9f2f683b338cd6ad565d6dc5b593 Dec 05 06:10:48 crc kubenswrapper[4742]: I1205 06:10:48.840246 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9800-account-create-update-gpgxj" event={"ID":"6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6","Type":"ContainerStarted","Data":"436cae8bb715069a8d59dd33f0d6b12b738e9f2f683b338cd6ad565d6dc5b593"} Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.772339 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.859444 4742 generic.go:334] "Generic (PLEG): container finished" podID="d380e842-910a-443b-aa5e-151fa1fe43ea" containerID="5eb38408e54a19b9894ac41a44e186c12072601993fd24a9ae1debe6066e29b3" exitCode=0 Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.859828 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-mlg7m" event={"ID":"d380e842-910a-443b-aa5e-151fa1fe43ea","Type":"ContainerDied","Data":"5eb38408e54a19b9894ac41a44e186c12072601993fd24a9ae1debe6066e29b3"} Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.861887 4742 generic.go:334] "Generic (PLEG): container finished" podID="66ddfb11-066b-41f6-8bd0-7248f3cc36ea" containerID="1d882621240ed0d1f7dc3e986f4b8a98dd8af495e6e2a4a66c919c647194c445" exitCode=0 Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.861978 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-75chc" event={"ID":"66ddfb11-066b-41f6-8bd0-7248f3cc36ea","Type":"ContainerDied","Data":"1d882621240ed0d1f7dc3e986f4b8a98dd8af495e6e2a4a66c919c647194c445"} Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.864395 4742 generic.go:334] "Generic (PLEG): container finished" podID="6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6" containerID="e1dd1f0e3402faa64ceb85997c4cfb6e94854d0472d6100b68ff61ab2b9908a5" exitCode=0 Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.864505 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9800-account-create-update-gpgxj" event={"ID":"6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6","Type":"ContainerDied","Data":"e1dd1f0e3402faa64ceb85997c4cfb6e94854d0472d6100b68ff61ab2b9908a5"} Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.865764 4742 generic.go:334] "Generic (PLEG): container finished" podID="cf9c88e4-e053-4594-bc1f-176035f2bff7" containerID="ead6e2c286b745cccfdd700f740be2071abf1e0e57f79f722943c9e07f78cbe0" exitCode=0 Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.865828 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7678-account-create-update-pv5qw" event={"ID":"cf9c88e4-e053-4594-bc1f-176035f2bff7","Type":"ContainerDied","Data":"ead6e2c286b745cccfdd700f740be2071abf1e0e57f79f722943c9e07f78cbe0"} Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.866928 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-vcv4d" event={"ID":"52759157-a5b0-481a-9128-ee595e269af9","Type":"ContainerStarted","Data":"2fb3977eb90709f749280ec6d06d71a5471a123464a4381ce4aa664c99c814b2"} Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.870188 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-ccctr" event={"ID":"b9048f3f-eef4-4fa6-933c-93c7ee484ae9","Type":"ContainerDied","Data":"680f02453e0e28d776b3a83346414cd65e5b29e43ee403f425be298dd883c786"} Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.870231 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-ccctr" Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.870253 4742 scope.go:117] "RemoveContainer" containerID="7e06a33d5567943615a3daf51ff89aefcabdb6c028afac99704b73e3f7ce1ba1" Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.898222 4742 scope.go:117] "RemoveContainer" containerID="75a911ef5516c55fd4f62824d78ef9c59b29ec073a189aa7cb7c9c220fe8a982" Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.910537 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-ovsdbserver-sb\") pod \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.910578 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-dns-svc\") pod \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.910654 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55f9m\" (UniqueName: \"kubernetes.io/projected/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-kube-api-access-55f9m\") pod \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.910710 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-ovsdbserver-nb\") pod \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.910770 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-config\") pod \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\" (UID: \"b9048f3f-eef4-4fa6-933c-93c7ee484ae9\") " Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.918019 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-kube-api-access-55f9m" (OuterVolumeSpecName: "kube-api-access-55f9m") pod "b9048f3f-eef4-4fa6-933c-93c7ee484ae9" (UID: "b9048f3f-eef4-4fa6-933c-93c7ee484ae9"). InnerVolumeSpecName "kube-api-access-55f9m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.959096 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-vcv4d" podStartSLOduration=2.632652447 podStartE2EDuration="8.959076532s" podCreationTimestamp="2025-12-05 06:10:41 +0000 UTC" firstStartedPulling="2025-12-05 06:10:42.927615213 +0000 UTC m=+1118.839750275" lastFinishedPulling="2025-12-05 06:10:49.254039288 +0000 UTC m=+1125.166174360" observedRunningTime="2025-12-05 06:10:49.950627137 +0000 UTC m=+1125.862762219" watchObservedRunningTime="2025-12-05 06:10:49.959076532 +0000 UTC m=+1125.871211594" Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.963753 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b9048f3f-eef4-4fa6-933c-93c7ee484ae9" (UID: "b9048f3f-eef4-4fa6-933c-93c7ee484ae9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.966286 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b9048f3f-eef4-4fa6-933c-93c7ee484ae9" (UID: "b9048f3f-eef4-4fa6-933c-93c7ee484ae9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.977876 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b9048f3f-eef4-4fa6-933c-93c7ee484ae9" (UID: "b9048f3f-eef4-4fa6-933c-93c7ee484ae9"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:49 crc kubenswrapper[4742]: I1205 06:10:49.984349 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-config" (OuterVolumeSpecName: "config") pod "b9048f3f-eef4-4fa6-933c-93c7ee484ae9" (UID: "b9048f3f-eef4-4fa6-933c-93c7ee484ae9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.013418 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.013454 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.013466 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55f9m\" (UniqueName: \"kubernetes.io/projected/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-kube-api-access-55f9m\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.013479 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.013490 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9048f3f-eef4-4fa6-933c-93c7ee484ae9-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.219271 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-ccctr"] Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.236522 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-ccctr"] Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.258318 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-5llvc"] Dec 05 06:10:50 crc kubenswrapper[4742]: E1205 06:10:50.260305 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9048f3f-eef4-4fa6-933c-93c7ee484ae9" containerName="dnsmasq-dns" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.260340 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9048f3f-eef4-4fa6-933c-93c7ee484ae9" containerName="dnsmasq-dns" Dec 05 06:10:50 crc kubenswrapper[4742]: E1205 06:10:50.260401 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9048f3f-eef4-4fa6-933c-93c7ee484ae9" containerName="init" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.260412 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9048f3f-eef4-4fa6-933c-93c7ee484ae9" containerName="init" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.261082 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9048f3f-eef4-4fa6-933c-93c7ee484ae9" containerName="dnsmasq-dns" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.262100 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-5llvc" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.281892 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-5llvc"] Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.316936 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldjrl\" (UniqueName: \"kubernetes.io/projected/1d395b5f-0ac7-4a77-ac68-27bc1b40915f-kube-api-access-ldjrl\") pod \"glance-db-create-5llvc\" (UID: \"1d395b5f-0ac7-4a77-ac68-27bc1b40915f\") " pod="openstack/glance-db-create-5llvc" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.317004 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d395b5f-0ac7-4a77-ac68-27bc1b40915f-operator-scripts\") pod \"glance-db-create-5llvc\" (UID: \"1d395b5f-0ac7-4a77-ac68-27bc1b40915f\") " pod="openstack/glance-db-create-5llvc" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.334812 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-bc83-account-create-update-6t2vt"] Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.335753 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-bc83-account-create-update-6t2vt" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.342391 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.344465 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-bc83-account-create-update-6t2vt"] Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.394277 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9048f3f-eef4-4fa6-933c-93c7ee484ae9" path="/var/lib/kubelet/pods/b9048f3f-eef4-4fa6-933c-93c7ee484ae9/volumes" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.418488 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptj7j\" (UniqueName: \"kubernetes.io/projected/2eb42b8d-3238-4559-99c7-92255d22f81a-kube-api-access-ptj7j\") pod \"glance-bc83-account-create-update-6t2vt\" (UID: \"2eb42b8d-3238-4559-99c7-92255d22f81a\") " pod="openstack/glance-bc83-account-create-update-6t2vt" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.418547 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2eb42b8d-3238-4559-99c7-92255d22f81a-operator-scripts\") pod \"glance-bc83-account-create-update-6t2vt\" (UID: \"2eb42b8d-3238-4559-99c7-92255d22f81a\") " pod="openstack/glance-bc83-account-create-update-6t2vt" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.418728 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldjrl\" (UniqueName: \"kubernetes.io/projected/1d395b5f-0ac7-4a77-ac68-27bc1b40915f-kube-api-access-ldjrl\") pod \"glance-db-create-5llvc\" (UID: \"1d395b5f-0ac7-4a77-ac68-27bc1b40915f\") " pod="openstack/glance-db-create-5llvc" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.418776 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d395b5f-0ac7-4a77-ac68-27bc1b40915f-operator-scripts\") pod \"glance-db-create-5llvc\" (UID: 
\"1d395b5f-0ac7-4a77-ac68-27bc1b40915f\") " pod="openstack/glance-db-create-5llvc" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.420343 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d395b5f-0ac7-4a77-ac68-27bc1b40915f-operator-scripts\") pod \"glance-db-create-5llvc\" (UID: \"1d395b5f-0ac7-4a77-ac68-27bc1b40915f\") " pod="openstack/glance-db-create-5llvc" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.444702 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldjrl\" (UniqueName: \"kubernetes.io/projected/1d395b5f-0ac7-4a77-ac68-27bc1b40915f-kube-api-access-ldjrl\") pod \"glance-db-create-5llvc\" (UID: \"1d395b5f-0ac7-4a77-ac68-27bc1b40915f\") " pod="openstack/glance-db-create-5llvc" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.464257 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-9n84z" podUID="b5df8784-b63d-41b7-a542-dcf53ea6cc5e" containerName="ovn-controller" probeResult="failure" output=< Dec 05 06:10:50 crc kubenswrapper[4742]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 05 06:10:50 crc kubenswrapper[4742]: > Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.520672 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptj7j\" (UniqueName: \"kubernetes.io/projected/2eb42b8d-3238-4559-99c7-92255d22f81a-kube-api-access-ptj7j\") pod \"glance-bc83-account-create-update-6t2vt\" (UID: \"2eb42b8d-3238-4559-99c7-92255d22f81a\") " pod="openstack/glance-bc83-account-create-update-6t2vt" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.520723 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2eb42b8d-3238-4559-99c7-92255d22f81a-operator-scripts\") pod \"glance-bc83-account-create-update-6t2vt\" (UID: \"2eb42b8d-3238-4559-99c7-92255d22f81a\") " pod="openstack/glance-bc83-account-create-update-6t2vt" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.521830 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2eb42b8d-3238-4559-99c7-92255d22f81a-operator-scripts\") pod \"glance-bc83-account-create-update-6t2vt\" (UID: \"2eb42b8d-3238-4559-99c7-92255d22f81a\") " pod="openstack/glance-bc83-account-create-update-6t2vt" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.537765 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptj7j\" (UniqueName: \"kubernetes.io/projected/2eb42b8d-3238-4559-99c7-92255d22f81a-kube-api-access-ptj7j\") pod \"glance-bc83-account-create-update-6t2vt\" (UID: \"2eb42b8d-3238-4559-99c7-92255d22f81a\") " pod="openstack/glance-bc83-account-create-update-6t2vt" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.633789 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-5llvc" Dec 05 06:10:50 crc kubenswrapper[4742]: I1205 06:10:50.660478 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-bc83-account-create-update-6t2vt" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.136971 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-bc83-account-create-update-6t2vt"] Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.224752 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-5llvc"] Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.545395 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-9800-account-create-update-gpgxj" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.640799 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67gk6\" (UniqueName: \"kubernetes.io/projected/6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6-kube-api-access-67gk6\") pod \"6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6\" (UID: \"6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6\") " Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.640932 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6-operator-scripts\") pod \"6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6\" (UID: \"6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6\") " Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.641712 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6" (UID: "6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.642558 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-75chc" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.649557 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6-kube-api-access-67gk6" (OuterVolumeSpecName: "kube-api-access-67gk6") pod "6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6" (UID: "6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6"). InnerVolumeSpecName "kube-api-access-67gk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.702889 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7678-account-create-update-pv5qw" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.711132 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-mlg7m" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.742569 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66ddfb11-066b-41f6-8bd0-7248f3cc36ea-operator-scripts\") pod \"66ddfb11-066b-41f6-8bd0-7248f3cc36ea\" (UID: \"66ddfb11-066b-41f6-8bd0-7248f3cc36ea\") " Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.742640 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vz55z\" (UniqueName: \"kubernetes.io/projected/66ddfb11-066b-41f6-8bd0-7248f3cc36ea-kube-api-access-vz55z\") pod \"66ddfb11-066b-41f6-8bd0-7248f3cc36ea\" (UID: \"66ddfb11-066b-41f6-8bd0-7248f3cc36ea\") " Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.743136 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66ddfb11-066b-41f6-8bd0-7248f3cc36ea-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "66ddfb11-066b-41f6-8bd0-7248f3cc36ea" (UID: "66ddfb11-066b-41f6-8bd0-7248f3cc36ea"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.743247 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67gk6\" (UniqueName: \"kubernetes.io/projected/6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6-kube-api-access-67gk6\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.743270 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.743291 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66ddfb11-066b-41f6-8bd0-7248f3cc36ea-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.746991 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66ddfb11-066b-41f6-8bd0-7248f3cc36ea-kube-api-access-vz55z" (OuterVolumeSpecName: "kube-api-access-vz55z") pod "66ddfb11-066b-41f6-8bd0-7248f3cc36ea" (UID: "66ddfb11-066b-41f6-8bd0-7248f3cc36ea"). InnerVolumeSpecName "kube-api-access-vz55z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.843991 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf9c88e4-e053-4594-bc1f-176035f2bff7-operator-scripts\") pod \"cf9c88e4-e053-4594-bc1f-176035f2bff7\" (UID: \"cf9c88e4-e053-4594-bc1f-176035f2bff7\") " Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.844075 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d380e842-910a-443b-aa5e-151fa1fe43ea-operator-scripts\") pod \"d380e842-910a-443b-aa5e-151fa1fe43ea\" (UID: \"d380e842-910a-443b-aa5e-151fa1fe43ea\") " Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.844177 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbgqk\" (UniqueName: \"kubernetes.io/projected/cf9c88e4-e053-4594-bc1f-176035f2bff7-kube-api-access-zbgqk\") pod \"cf9c88e4-e053-4594-bc1f-176035f2bff7\" (UID: \"cf9c88e4-e053-4594-bc1f-176035f2bff7\") " Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.844308 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9kpq\" (UniqueName: \"kubernetes.io/projected/d380e842-910a-443b-aa5e-151fa1fe43ea-kube-api-access-x9kpq\") pod \"d380e842-910a-443b-aa5e-151fa1fe43ea\" (UID: \"d380e842-910a-443b-aa5e-151fa1fe43ea\") " Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.844764 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vz55z\" (UniqueName: \"kubernetes.io/projected/66ddfb11-066b-41f6-8bd0-7248f3cc36ea-kube-api-access-vz55z\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.844798 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d380e842-910a-443b-aa5e-151fa1fe43ea-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d380e842-910a-443b-aa5e-151fa1fe43ea" (UID: "d380e842-910a-443b-aa5e-151fa1fe43ea"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.845326 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf9c88e4-e053-4594-bc1f-176035f2bff7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cf9c88e4-e053-4594-bc1f-176035f2bff7" (UID: "cf9c88e4-e053-4594-bc1f-176035f2bff7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.847663 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf9c88e4-e053-4594-bc1f-176035f2bff7-kube-api-access-zbgqk" (OuterVolumeSpecName: "kube-api-access-zbgqk") pod "cf9c88e4-e053-4594-bc1f-176035f2bff7" (UID: "cf9c88e4-e053-4594-bc1f-176035f2bff7"). InnerVolumeSpecName "kube-api-access-zbgqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.848198 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d380e842-910a-443b-aa5e-151fa1fe43ea-kube-api-access-x9kpq" (OuterVolumeSpecName: "kube-api-access-x9kpq") pod "d380e842-910a-443b-aa5e-151fa1fe43ea" (UID: "d380e842-910a-443b-aa5e-151fa1fe43ea"). 
InnerVolumeSpecName "kube-api-access-x9kpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.887685 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7678-account-create-update-pv5qw" event={"ID":"cf9c88e4-e053-4594-bc1f-176035f2bff7","Type":"ContainerDied","Data":"a1016c3679492f0ba39a60a9f649e0aefa001e9c1f0269e4d935c11329b8216d"} Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.887741 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1016c3679492f0ba39a60a9f649e0aefa001e9c1f0269e4d935c11329b8216d" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.887699 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7678-account-create-update-pv5qw" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.889458 4742 generic.go:334] "Generic (PLEG): container finished" podID="2eb42b8d-3238-4559-99c7-92255d22f81a" containerID="3cc80d22c7502d26367a5895602d250fbad727be72075cf30657b1b606d9c71d" exitCode=0 Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.889539 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-bc83-account-create-update-6t2vt" event={"ID":"2eb42b8d-3238-4559-99c7-92255d22f81a","Type":"ContainerDied","Data":"3cc80d22c7502d26367a5895602d250fbad727be72075cf30657b1b606d9c71d"} Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.890019 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-bc83-account-create-update-6t2vt" event={"ID":"2eb42b8d-3238-4559-99c7-92255d22f81a","Type":"ContainerStarted","Data":"75e577aca2c8db38a69444996d9e84ecf0eba8232718991524974d1e7a76a2a6"} Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.891041 4742 generic.go:334] "Generic (PLEG): container finished" podID="1d395b5f-0ac7-4a77-ac68-27bc1b40915f" containerID="24fccc91d8c6dd1906a753332966ef0cd717400a1b2282ac6d7bf3319bb0cf20" exitCode=0 Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.891421 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5llvc" event={"ID":"1d395b5f-0ac7-4a77-ac68-27bc1b40915f","Type":"ContainerDied","Data":"24fccc91d8c6dd1906a753332966ef0cd717400a1b2282ac6d7bf3319bb0cf20"} Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.891767 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5llvc" event={"ID":"1d395b5f-0ac7-4a77-ac68-27bc1b40915f","Type":"ContainerStarted","Data":"fda64e4c066321d8889a89c8896865831c29850929e14ca42c226c331d138996"} Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.892446 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-mlg7m" event={"ID":"d380e842-910a-443b-aa5e-151fa1fe43ea","Type":"ContainerDied","Data":"600150eec8f008ae95d274c55cec2e4714d6a32a25e75a67753e67bedb008e68"} Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.892483 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="600150eec8f008ae95d274c55cec2e4714d6a32a25e75a67753e67bedb008e68" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.892527 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-mlg7m" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.894196 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-75chc" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.894240 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-75chc" event={"ID":"66ddfb11-066b-41f6-8bd0-7248f3cc36ea","Type":"ContainerDied","Data":"a5c3a86a9f7073de7183f7f4b8179af08280e0785e85aa1a59af7258b22a18e1"} Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.894259 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5c3a86a9f7073de7183f7f4b8179af08280e0785e85aa1a59af7258b22a18e1" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.899769 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-9800-account-create-update-gpgxj" event={"ID":"6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6","Type":"ContainerDied","Data":"436cae8bb715069a8d59dd33f0d6b12b738e9f2f683b338cd6ad565d6dc5b593"} Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.899801 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="436cae8bb715069a8d59dd33f0d6b12b738e9f2f683b338cd6ad565d6dc5b593" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.899963 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-9800-account-create-update-gpgxj" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.947314 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9kpq\" (UniqueName: \"kubernetes.io/projected/d380e842-910a-443b-aa5e-151fa1fe43ea-kube-api-access-x9kpq\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.947371 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf9c88e4-e053-4594-bc1f-176035f2bff7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.947391 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d380e842-910a-443b-aa5e-151fa1fe43ea-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:51 crc kubenswrapper[4742]: I1205 06:10:51.947412 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbgqk\" (UniqueName: \"kubernetes.io/projected/cf9c88e4-e053-4594-bc1f-176035f2bff7-kube-api-access-zbgqk\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.296619 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-bc83-account-create-update-6t2vt" Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.303394 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-5llvc" Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.374824 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d395b5f-0ac7-4a77-ac68-27bc1b40915f-operator-scripts\") pod \"1d395b5f-0ac7-4a77-ac68-27bc1b40915f\" (UID: \"1d395b5f-0ac7-4a77-ac68-27bc1b40915f\") " Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.374931 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptj7j\" (UniqueName: \"kubernetes.io/projected/2eb42b8d-3238-4559-99c7-92255d22f81a-kube-api-access-ptj7j\") pod \"2eb42b8d-3238-4559-99c7-92255d22f81a\" (UID: \"2eb42b8d-3238-4559-99c7-92255d22f81a\") " Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.374983 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2eb42b8d-3238-4559-99c7-92255d22f81a-operator-scripts\") pod \"2eb42b8d-3238-4559-99c7-92255d22f81a\" (UID: \"2eb42b8d-3238-4559-99c7-92255d22f81a\") " Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.375080 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldjrl\" (UniqueName: \"kubernetes.io/projected/1d395b5f-0ac7-4a77-ac68-27bc1b40915f-kube-api-access-ldjrl\") pod \"1d395b5f-0ac7-4a77-ac68-27bc1b40915f\" (UID: \"1d395b5f-0ac7-4a77-ac68-27bc1b40915f\") " Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.375530 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d395b5f-0ac7-4a77-ac68-27bc1b40915f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1d395b5f-0ac7-4a77-ac68-27bc1b40915f" (UID: "1d395b5f-0ac7-4a77-ac68-27bc1b40915f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.375709 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2eb42b8d-3238-4559-99c7-92255d22f81a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2eb42b8d-3238-4559-99c7-92255d22f81a" (UID: "2eb42b8d-3238-4559-99c7-92255d22f81a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.381296 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d395b5f-0ac7-4a77-ac68-27bc1b40915f-kube-api-access-ldjrl" (OuterVolumeSpecName: "kube-api-access-ldjrl") pod "1d395b5f-0ac7-4a77-ac68-27bc1b40915f" (UID: "1d395b5f-0ac7-4a77-ac68-27bc1b40915f"). InnerVolumeSpecName "kube-api-access-ldjrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.381822 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2eb42b8d-3238-4559-99c7-92255d22f81a-kube-api-access-ptj7j" (OuterVolumeSpecName: "kube-api-access-ptj7j") pod "2eb42b8d-3238-4559-99c7-92255d22f81a" (UID: "2eb42b8d-3238-4559-99c7-92255d22f81a"). InnerVolumeSpecName "kube-api-access-ptj7j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.478095 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptj7j\" (UniqueName: \"kubernetes.io/projected/2eb42b8d-3238-4559-99c7-92255d22f81a-kube-api-access-ptj7j\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.478132 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2eb42b8d-3238-4559-99c7-92255d22f81a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.478145 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldjrl\" (UniqueName: \"kubernetes.io/projected/1d395b5f-0ac7-4a77-ac68-27bc1b40915f-kube-api-access-ldjrl\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.478158 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d395b5f-0ac7-4a77-ac68-27bc1b40915f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:53 crc kubenswrapper[4742]: E1205 06:10:53.884842 4742 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 06:10:53 crc kubenswrapper[4742]: E1205 06:10:53.884885 4742 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 06:10:53 crc kubenswrapper[4742]: E1205 06:10:53.884962 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift podName:f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b nodeName:}" failed. No retries permitted until 2025-12-05 06:11:09.884938912 +0000 UTC m=+1145.797074014 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift") pod "swift-storage-0" (UID: "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b") : configmap "swift-ring-files" not found Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.884645 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.923967 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-bc83-account-create-update-6t2vt" event={"ID":"2eb42b8d-3238-4559-99c7-92255d22f81a","Type":"ContainerDied","Data":"75e577aca2c8db38a69444996d9e84ecf0eba8232718991524974d1e7a76a2a6"} Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.924007 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-bc83-account-create-update-6t2vt" Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.924800 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="75e577aca2c8db38a69444996d9e84ecf0eba8232718991524974d1e7a76a2a6" Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.926696 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-5llvc" event={"ID":"1d395b5f-0ac7-4a77-ac68-27bc1b40915f","Type":"ContainerDied","Data":"fda64e4c066321d8889a89c8896865831c29850929e14ca42c226c331d138996"} Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.926747 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fda64e4c066321d8889a89c8896865831c29850929e14ca42c226c331d138996" Dec 05 06:10:53 crc kubenswrapper[4742]: I1205 06:10:53.926824 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-5llvc" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.487321 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-9n84z" podUID="b5df8784-b63d-41b7-a542-dcf53ea6cc5e" containerName="ovn-controller" probeResult="failure" output=< Dec 05 06:10:55 crc kubenswrapper[4742]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 05 06:10:55 crc kubenswrapper[4742]: > Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.514729 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.522493 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.602143 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-f8hfs"] Dec 05 06:10:55 crc kubenswrapper[4742]: E1205 06:10:55.602496 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66ddfb11-066b-41f6-8bd0-7248f3cc36ea" containerName="mariadb-database-create" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.602509 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="66ddfb11-066b-41f6-8bd0-7248f3cc36ea" containerName="mariadb-database-create" Dec 05 06:10:55 crc kubenswrapper[4742]: E1205 06:10:55.602521 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6" containerName="mariadb-account-create-update" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.602527 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6" containerName="mariadb-account-create-update" Dec 05 06:10:55 crc kubenswrapper[4742]: E1205 06:10:55.602541 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d380e842-910a-443b-aa5e-151fa1fe43ea" containerName="mariadb-database-create" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.602547 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d380e842-910a-443b-aa5e-151fa1fe43ea" containerName="mariadb-database-create" Dec 05 06:10:55 crc kubenswrapper[4742]: E1205 06:10:55.602556 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d395b5f-0ac7-4a77-ac68-27bc1b40915f" containerName="mariadb-database-create" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.602561 4742 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1d395b5f-0ac7-4a77-ac68-27bc1b40915f" containerName="mariadb-database-create" Dec 05 06:10:55 crc kubenswrapper[4742]: E1205 06:10:55.602575 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf9c88e4-e053-4594-bc1f-176035f2bff7" containerName="mariadb-account-create-update" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.602581 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf9c88e4-e053-4594-bc1f-176035f2bff7" containerName="mariadb-account-create-update" Dec 05 06:10:55 crc kubenswrapper[4742]: E1205 06:10:55.602594 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2eb42b8d-3238-4559-99c7-92255d22f81a" containerName="mariadb-account-create-update" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.602600 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="2eb42b8d-3238-4559-99c7-92255d22f81a" containerName="mariadb-account-create-update" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.602758 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="d380e842-910a-443b-aa5e-151fa1fe43ea" containerName="mariadb-database-create" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.602767 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6" containerName="mariadb-account-create-update" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.602776 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="2eb42b8d-3238-4559-99c7-92255d22f81a" containerName="mariadb-account-create-update" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.602788 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d395b5f-0ac7-4a77-ac68-27bc1b40915f" containerName="mariadb-database-create" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.602805 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="66ddfb11-066b-41f6-8bd0-7248f3cc36ea" containerName="mariadb-database-create" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.602814 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf9c88e4-e053-4594-bc1f-176035f2bff7" containerName="mariadb-account-create-update" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.604068 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-f8hfs" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.610793 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-f8hfs"] Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.632886 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.633050 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-jk4gs" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.736621 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-db-sync-config-data\") pod \"glance-db-sync-f8hfs\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") " pod="openstack/glance-db-sync-f8hfs" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.736839 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-combined-ca-bundle\") pod \"glance-db-sync-f8hfs\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") " pod="openstack/glance-db-sync-f8hfs" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.736948 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9258\" (UniqueName: \"kubernetes.io/projected/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-kube-api-access-k9258\") pod \"glance-db-sync-f8hfs\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") " pod="openstack/glance-db-sync-f8hfs" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.737089 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-config-data\") pod \"glance-db-sync-f8hfs\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") " pod="openstack/glance-db-sync-f8hfs" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.839184 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-db-sync-config-data\") pod \"glance-db-sync-f8hfs\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") " pod="openstack/glance-db-sync-f8hfs" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.839372 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-combined-ca-bundle\") pod \"glance-db-sync-f8hfs\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") " pod="openstack/glance-db-sync-f8hfs" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.839452 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9258\" (UniqueName: \"kubernetes.io/projected/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-kube-api-access-k9258\") pod \"glance-db-sync-f8hfs\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") " pod="openstack/glance-db-sync-f8hfs" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.839579 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-config-data\") pod 
\"glance-db-sync-f8hfs\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") " pod="openstack/glance-db-sync-f8hfs" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.841584 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-9n84z-config-kmrmd"] Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.843336 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.847420 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-combined-ca-bundle\") pod \"glance-db-sync-f8hfs\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") " pod="openstack/glance-db-sync-f8hfs" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.848262 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-config-data\") pod \"glance-db-sync-f8hfs\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") " pod="openstack/glance-db-sync-f8hfs" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.849110 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.867478 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9n84z-config-kmrmd"] Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.868074 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-db-sync-config-data\") pod \"glance-db-sync-f8hfs\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") " pod="openstack/glance-db-sync-f8hfs" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.875608 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9258\" (UniqueName: \"kubernetes.io/projected/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-kube-api-access-k9258\") pod \"glance-db-sync-f8hfs\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") " pod="openstack/glance-db-sync-f8hfs" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.941292 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-log-ovn\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.941386 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c74w2\" (UniqueName: \"kubernetes.io/projected/97635213-f3d8-434d-9f8c-27427ff42464-kube-api-access-c74w2\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.941418 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/97635213-f3d8-434d-9f8c-27427ff42464-additional-scripts\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " 
pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.941498 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/97635213-f3d8-434d-9f8c-27427ff42464-scripts\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.941533 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-run-ovn\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.941714 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-run\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:55 crc kubenswrapper[4742]: I1205 06:10:55.946756 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-f8hfs" Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.043174 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/97635213-f3d8-434d-9f8c-27427ff42464-scripts\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.043506 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-run-ovn\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.043550 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-run\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.043600 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-log-ovn\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.043663 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c74w2\" (UniqueName: \"kubernetes.io/projected/97635213-f3d8-434d-9f8c-27427ff42464-kube-api-access-c74w2\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.043691 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/97635213-f3d8-434d-9f8c-27427ff42464-additional-scripts\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.043915 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-log-ovn\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.043954 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-run\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.045765 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/97635213-f3d8-434d-9f8c-27427ff42464-additional-scripts\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.045867 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/97635213-f3d8-434d-9f8c-27427ff42464-scripts\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.045915 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-run-ovn\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.066050 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c74w2\" (UniqueName: \"kubernetes.io/projected/97635213-f3d8-434d-9f8c-27427ff42464-kube-api-access-c74w2\") pod \"ovn-controller-9n84z-config-kmrmd\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.234423 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.493414 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-f8hfs"] Dec 05 06:10:56 crc kubenswrapper[4742]: W1205 06:10:56.497600 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7413df9_1f00_41d2_8ef1_e85a83fd6eac.slice/crio-baa8b224d27c0b5cb58d9d2b7e21399d7177d64b1ac8d63eae15aae506603feb WatchSource:0}: Error finding container baa8b224d27c0b5cb58d9d2b7e21399d7177d64b1ac8d63eae15aae506603feb: Status 404 returned error can't find the container with id baa8b224d27c0b5cb58d9d2b7e21399d7177d64b1ac8d63eae15aae506603feb Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.749789 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-9n84z-config-kmrmd"] Dec 05 06:10:56 crc kubenswrapper[4742]: W1205 06:10:56.754135 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97635213_f3d8_434d_9f8c_27427ff42464.slice/crio-b85021400665aff1213bf342210ac88562751b85fbc29f58debf539c3347924b WatchSource:0}: Error finding container b85021400665aff1213bf342210ac88562751b85fbc29f58debf539c3347924b: Status 404 returned error can't find the container with id b85021400665aff1213bf342210ac88562751b85fbc29f58debf539c3347924b Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.952952 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9n84z-config-kmrmd" event={"ID":"97635213-f3d8-434d-9f8c-27427ff42464","Type":"ContainerStarted","Data":"b85021400665aff1213bf342210ac88562751b85fbc29f58debf539c3347924b"} Dec 05 06:10:56 crc kubenswrapper[4742]: I1205 06:10:56.954793 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-f8hfs" event={"ID":"c7413df9-1f00-41d2-8ef1-e85a83fd6eac","Type":"ContainerStarted","Data":"baa8b224d27c0b5cb58d9d2b7e21399d7177d64b1ac8d63eae15aae506603feb"} Dec 05 06:10:57 crc kubenswrapper[4742]: I1205 06:10:57.965114 4742 generic.go:334] "Generic (PLEG): container finished" podID="52759157-a5b0-481a-9128-ee595e269af9" containerID="2fb3977eb90709f749280ec6d06d71a5471a123464a4381ce4aa664c99c814b2" exitCode=0 Dec 05 06:10:57 crc kubenswrapper[4742]: I1205 06:10:57.965182 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-vcv4d" event={"ID":"52759157-a5b0-481a-9128-ee595e269af9","Type":"ContainerDied","Data":"2fb3977eb90709f749280ec6d06d71a5471a123464a4381ce4aa664c99c814b2"} Dec 05 06:10:57 crc kubenswrapper[4742]: I1205 06:10:57.970106 4742 generic.go:334] "Generic (PLEG): container finished" podID="97635213-f3d8-434d-9f8c-27427ff42464" containerID="910e17e6a6232277e245b4de61db21d9407700084a851b426cc8c12da12d003b" exitCode=0 Dec 05 06:10:57 crc kubenswrapper[4742]: I1205 06:10:57.970135 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9n84z-config-kmrmd" event={"ID":"97635213-f3d8-434d-9f8c-27427ff42464","Type":"ContainerDied","Data":"910e17e6a6232277e245b4de61db21d9407700084a851b426cc8c12da12d003b"} Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.434647 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.441328 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503110 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-log-ovn\") pod \"97635213-f3d8-434d-9f8c-27427ff42464\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503168 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-run\") pod \"97635213-f3d8-434d-9f8c-27427ff42464\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503215 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-dispersionconf\") pod \"52759157-a5b0-481a-9128-ee595e269af9\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503218 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "97635213-f3d8-434d-9f8c-27427ff42464" (UID: "97635213-f3d8-434d-9f8c-27427ff42464"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503292 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/97635213-f3d8-434d-9f8c-27427ff42464-scripts\") pod \"97635213-f3d8-434d-9f8c-27427ff42464\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503316 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-run" (OuterVolumeSpecName: "var-run") pod "97635213-f3d8-434d-9f8c-27427ff42464" (UID: "97635213-f3d8-434d-9f8c-27427ff42464"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503359 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/52759157-a5b0-481a-9128-ee595e269af9-etc-swift\") pod \"52759157-a5b0-481a-9128-ee595e269af9\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503388 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/52759157-a5b0-481a-9128-ee595e269af9-ring-data-devices\") pod \"52759157-a5b0-481a-9128-ee595e269af9\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503445 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-swiftconf\") pod \"52759157-a5b0-481a-9128-ee595e269af9\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503472 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-combined-ca-bundle\") pod \"52759157-a5b0-481a-9128-ee595e269af9\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503494 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-run-ovn\") pod \"97635213-f3d8-434d-9f8c-27427ff42464\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503529 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c74w2\" (UniqueName: \"kubernetes.io/projected/97635213-f3d8-434d-9f8c-27427ff42464-kube-api-access-c74w2\") pod \"97635213-f3d8-434d-9f8c-27427ff42464\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503570 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/97635213-f3d8-434d-9f8c-27427ff42464-additional-scripts\") pod \"97635213-f3d8-434d-9f8c-27427ff42464\" (UID: \"97635213-f3d8-434d-9f8c-27427ff42464\") " Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503595 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/52759157-a5b0-481a-9128-ee595e269af9-scripts\") pod \"52759157-a5b0-481a-9128-ee595e269af9\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503645 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "97635213-f3d8-434d-9f8c-27427ff42464" (UID: "97635213-f3d8-434d-9f8c-27427ff42464"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.503674 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xx7qz\" (UniqueName: \"kubernetes.io/projected/52759157-a5b0-481a-9128-ee595e269af9-kube-api-access-xx7qz\") pod \"52759157-a5b0-481a-9128-ee595e269af9\" (UID: \"52759157-a5b0-481a-9128-ee595e269af9\") " Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.504394 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52759157-a5b0-481a-9128-ee595e269af9-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "52759157-a5b0-481a-9128-ee595e269af9" (UID: "52759157-a5b0-481a-9128-ee595e269af9"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.504402 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97635213-f3d8-434d-9f8c-27427ff42464-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "97635213-f3d8-434d-9f8c-27427ff42464" (UID: "97635213-f3d8-434d-9f8c-27427ff42464"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.504573 4742 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.504784 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52759157-a5b0-481a-9128-ee595e269af9-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "52759157-a5b0-481a-9128-ee595e269af9" (UID: "52759157-a5b0-481a-9128-ee595e269af9"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.505180 4742 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.505526 4742 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/97635213-f3d8-434d-9f8c-27427ff42464-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.506003 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97635213-f3d8-434d-9f8c-27427ff42464-scripts" (OuterVolumeSpecName: "scripts") pod "97635213-f3d8-434d-9f8c-27427ff42464" (UID: "97635213-f3d8-434d-9f8c-27427ff42464"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.510232 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52759157-a5b0-481a-9128-ee595e269af9-kube-api-access-xx7qz" (OuterVolumeSpecName: "kube-api-access-xx7qz") pod "52759157-a5b0-481a-9128-ee595e269af9" (UID: "52759157-a5b0-481a-9128-ee595e269af9"). InnerVolumeSpecName "kube-api-access-xx7qz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.510935 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97635213-f3d8-434d-9f8c-27427ff42464-kube-api-access-c74w2" (OuterVolumeSpecName: "kube-api-access-c74w2") pod "97635213-f3d8-434d-9f8c-27427ff42464" (UID: "97635213-f3d8-434d-9f8c-27427ff42464"). InnerVolumeSpecName "kube-api-access-c74w2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.513227 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "52759157-a5b0-481a-9128-ee595e269af9" (UID: "52759157-a5b0-481a-9128-ee595e269af9"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.526381 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52759157-a5b0-481a-9128-ee595e269af9-scripts" (OuterVolumeSpecName: "scripts") pod "52759157-a5b0-481a-9128-ee595e269af9" (UID: "52759157-a5b0-481a-9128-ee595e269af9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.533860 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "52759157-a5b0-481a-9128-ee595e269af9" (UID: "52759157-a5b0-481a-9128-ee595e269af9"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.534585 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "52759157-a5b0-481a-9128-ee595e269af9" (UID: "52759157-a5b0-481a-9128-ee595e269af9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.607138 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xx7qz\" (UniqueName: \"kubernetes.io/projected/52759157-a5b0-481a-9128-ee595e269af9-kube-api-access-xx7qz\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.607187 4742 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.607198 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/97635213-f3d8-434d-9f8c-27427ff42464-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.607206 4742 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/52759157-a5b0-481a-9128-ee595e269af9-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.607215 4742 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/52759157-a5b0-481a-9128-ee595e269af9-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.607223 4742 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.607231 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52759157-a5b0-481a-9128-ee595e269af9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.607240 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c74w2\" (UniqueName: \"kubernetes.io/projected/97635213-f3d8-434d-9f8c-27427ff42464-kube-api-access-c74w2\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.607249 4742 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/97635213-f3d8-434d-9f8c-27427ff42464-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:10:59 crc kubenswrapper[4742]: I1205 06:10:59.607257 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/52759157-a5b0-481a-9128-ee595e269af9-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:00 crc kubenswrapper[4742]: I1205 06:11:00.003732 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-vcv4d" event={"ID":"52759157-a5b0-481a-9128-ee595e269af9","Type":"ContainerDied","Data":"dbab7e307227c706f2bfaa06abfe46fbc0143e625c49cb9d747748af7d6b0e66"} Dec 05 06:11:00 crc kubenswrapper[4742]: I1205 06:11:00.003780 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dbab7e307227c706f2bfaa06abfe46fbc0143e625c49cb9d747748af7d6b0e66" Dec 05 06:11:00 crc kubenswrapper[4742]: I1205 06:11:00.003744 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-vcv4d" Dec 05 06:11:00 crc kubenswrapper[4742]: I1205 06:11:00.005529 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9n84z-config-kmrmd" event={"ID":"97635213-f3d8-434d-9f8c-27427ff42464","Type":"ContainerDied","Data":"b85021400665aff1213bf342210ac88562751b85fbc29f58debf539c3347924b"} Dec 05 06:11:00 crc kubenswrapper[4742]: I1205 06:11:00.005582 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b85021400665aff1213bf342210ac88562751b85fbc29f58debf539c3347924b" Dec 05 06:11:00 crc kubenswrapper[4742]: I1205 06:11:00.005590 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9n84z-config-kmrmd" Dec 05 06:11:00 crc kubenswrapper[4742]: I1205 06:11:00.522734 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-9n84z" Dec 05 06:11:00 crc kubenswrapper[4742]: I1205 06:11:00.578490 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-9n84z-config-kmrmd"] Dec 05 06:11:00 crc kubenswrapper[4742]: I1205 06:11:00.584611 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-9n84z-config-kmrmd"] Dec 05 06:11:01 crc kubenswrapper[4742]: I1205 06:11:01.013505 4742 generic.go:334] "Generic (PLEG): container finished" podID="d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" containerID="e37747c80db44fb441e66ad6045bb07df3a0d78b12b382201b8a54d8b6957d0b" exitCode=0 Dec 05 06:11:01 crc kubenswrapper[4742]: I1205 06:11:01.013549 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e","Type":"ContainerDied","Data":"e37747c80db44fb441e66ad6045bb07df3a0d78b12b382201b8a54d8b6957d0b"} Dec 05 06:11:02 crc kubenswrapper[4742]: I1205 06:11:02.392779 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97635213-f3d8-434d-9f8c-27427ff42464" path="/var/lib/kubelet/pods/97635213-f3d8-434d-9f8c-27427ff42464/volumes" Dec 05 06:11:03 crc kubenswrapper[4742]: I1205 06:11:03.036979 4742 generic.go:334] "Generic (PLEG): container finished" podID="7b5d8165-e06e-4600-9cab-9cf84c010725" containerID="2a73506fa683772c445e145b7336056dc8c87df69830067f0bc2e973540b7546" exitCode=0 Dec 05 06:11:03 crc kubenswrapper[4742]: I1205 06:11:03.037028 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7b5d8165-e06e-4600-9cab-9cf84c010725","Type":"ContainerDied","Data":"2a73506fa683772c445e145b7336056dc8c87df69830067f0bc2e973540b7546"} Dec 05 06:11:09 crc kubenswrapper[4742]: I1205 06:11:09.090539 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7b5d8165-e06e-4600-9cab-9cf84c010725","Type":"ContainerStarted","Data":"4c9ed2559817c2da1b28311959a187477072585e1e74ef4ffe26d1ce23f9ee55"} Dec 05 06:11:09 crc kubenswrapper[4742]: I1205 06:11:09.091318 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:11:09 crc kubenswrapper[4742]: I1205 06:11:09.092222 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-f8hfs" event={"ID":"c7413df9-1f00-41d2-8ef1-e85a83fd6eac","Type":"ContainerStarted","Data":"36db8da42759bb13adccb6c0c7096e6756878db45e78c24b62c01ebefc0b14b1"} Dec 05 06:11:09 crc kubenswrapper[4742]: I1205 06:11:09.094945 4742 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e","Type":"ContainerStarted","Data":"9049700e89ccd644394c6fda74ff3a49949e7dbd626334fac51902707979e0d4"} Dec 05 06:11:09 crc kubenswrapper[4742]: I1205 06:11:09.095250 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 06:11:09 crc kubenswrapper[4742]: I1205 06:11:09.125046 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371947.729755 podStartE2EDuration="1m29.125019725s" podCreationTimestamp="2025-12-05 06:09:40 +0000 UTC" firstStartedPulling="2025-12-05 06:09:42.752317889 +0000 UTC m=+1058.664452951" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:11:09.112848891 +0000 UTC m=+1145.024983963" watchObservedRunningTime="2025-12-05 06:11:09.125019725 +0000 UTC m=+1145.037154817" Dec 05 06:11:09 crc kubenswrapper[4742]: I1205 06:11:09.150860 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=44.727391424 podStartE2EDuration="1m29.15084203s" podCreationTimestamp="2025-12-05 06:09:40 +0000 UTC" firstStartedPulling="2025-12-05 06:09:42.421278313 +0000 UTC m=+1058.333413365" lastFinishedPulling="2025-12-05 06:10:26.844728909 +0000 UTC m=+1102.756863971" observedRunningTime="2025-12-05 06:11:09.140895016 +0000 UTC m=+1145.053030108" watchObservedRunningTime="2025-12-05 06:11:09.15084203 +0000 UTC m=+1145.062977092" Dec 05 06:11:09 crc kubenswrapper[4742]: I1205 06:11:09.162912 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-f8hfs" podStartSLOduration=2.218515542 podStartE2EDuration="14.16289151s" podCreationTimestamp="2025-12-05 06:10:55 +0000 UTC" firstStartedPulling="2025-12-05 06:10:56.501391958 +0000 UTC m=+1132.413527020" lastFinishedPulling="2025-12-05 06:11:08.445767916 +0000 UTC m=+1144.357902988" observedRunningTime="2025-12-05 06:11:09.158729449 +0000 UTC m=+1145.070864521" watchObservedRunningTime="2025-12-05 06:11:09.16289151 +0000 UTC m=+1145.075026592" Dec 05 06:11:09 crc kubenswrapper[4742]: I1205 06:11:09.895086 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:11:09 crc kubenswrapper[4742]: I1205 06:11:09.900245 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift\") pod \"swift-storage-0\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " pod="openstack/swift-storage-0" Dec 05 06:11:10 crc kubenswrapper[4742]: I1205 06:11:10.108964 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 05 06:11:10 crc kubenswrapper[4742]: I1205 06:11:10.635636 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 06:11:10 crc kubenswrapper[4742]: W1205 06:11:10.644445 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf6d1bf24_115c_4dd5_8dcd_84a3b4e7456b.slice/crio-b6d1619bc68a6a3e38a75cf2569fbb323e55a9bf3056bb602d712f91b78b7a22 WatchSource:0}: Error finding container b6d1619bc68a6a3e38a75cf2569fbb323e55a9bf3056bb602d712f91b78b7a22: Status 404 returned error can't find the container with id b6d1619bc68a6a3e38a75cf2569fbb323e55a9bf3056bb602d712f91b78b7a22 Dec 05 06:11:11 crc kubenswrapper[4742]: I1205 06:11:11.122418 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"b6d1619bc68a6a3e38a75cf2569fbb323e55a9bf3056bb602d712f91b78b7a22"} Dec 05 06:11:13 crc kubenswrapper[4742]: I1205 06:11:13.139042 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"c5fb081f824bef8d38c8af954df8461ee3d95ecdcf24abe5f14ab12b9b79eaaf"} Dec 05 06:11:13 crc kubenswrapper[4742]: I1205 06:11:13.139628 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"662fdbccb819aa757a5eacbc682c4c9d90ae7096d453acac04fbb8d2c2d724e9"} Dec 05 06:11:14 crc kubenswrapper[4742]: I1205 06:11:14.150987 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"a572a48ba1392fc5a8267bbf98db108fc184bd4381327a56c58c1ab6e32e931f"} Dec 05 06:11:15 crc kubenswrapper[4742]: I1205 06:11:15.166821 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"f326ad43ebacf85ccc332a8b35e3c1fcca45c28c2a4b229df2457a4983e5ac64"} Dec 05 06:11:16 crc kubenswrapper[4742]: I1205 06:11:16.178382 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"0eaa57a7d1edb43a21fc4813afcd8bd8362171ef4a64691cceb98492dc6baccb"} Dec 05 06:11:16 crc kubenswrapper[4742]: I1205 06:11:16.178718 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"9b48fcbbeeb0dfa6813285a8982e885fd781741008cdbdef2351e5277caa44d7"} Dec 05 06:11:16 crc kubenswrapper[4742]: I1205 06:11:16.670955 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:11:16 crc kubenswrapper[4742]: I1205 06:11:16.671367 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:11:17 crc kubenswrapper[4742]: I1205 06:11:17.196536 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"10dd23759afdac3e2bd7b9f5ad1e8df111f57ac5da85f46f8da24ff04f9269b3"} Dec 05 06:11:17 crc kubenswrapper[4742]: I1205 06:11:17.196588 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"a0061dd4be94377433ec372b482fc15e1700f814f12276d54dcb9692d64d5aab"} Dec 05 06:11:18 crc kubenswrapper[4742]: I1205 06:11:18.210995 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"76a0ea55014e165c32a9608cf728f086e8082a97b97de1dd0fdb8cb27caa0a7e"} Dec 05 06:11:18 crc kubenswrapper[4742]: I1205 06:11:18.211419 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"89f787d3dcbe6d0e4ec54aa9195f1fe45b50844797a04c3b326966f17201a671"} Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.221461 4742 generic.go:334] "Generic (PLEG): container finished" podID="c7413df9-1f00-41d2-8ef1-e85a83fd6eac" containerID="36db8da42759bb13adccb6c0c7096e6756878db45e78c24b62c01ebefc0b14b1" exitCode=0 Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.221628 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-f8hfs" event={"ID":"c7413df9-1f00-41d2-8ef1-e85a83fd6eac","Type":"ContainerDied","Data":"36db8da42759bb13adccb6c0c7096e6756878db45e78c24b62c01ebefc0b14b1"} Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.236443 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"23080c098e9241c0fbef2d16834563f5be32e05ed6a93235162276519247c330"} Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.236499 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"402a519ada8012b4c837384aa46b5de9d5a53090de81a4bfd5fca5e66afd80ab"} Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.236514 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"83482a05302b7d016da2098260530a40bbabcc0dd30bbee8e001d56649d1fa10"} Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.236524 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"583bc9aead517370de5511bc87f2ce12fd00f5d749568164aaf7dd9550bed55d"} Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.236536 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerStarted","Data":"c8e35cc4fdc899da4c083432abcbef5e1ad92a9b95d3984cab5952bb33e1b375"} Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.285447 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" 
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.579276 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-r2rbf"]
Dec 05 06:11:19 crc kubenswrapper[4742]: E1205 06:11:19.579826 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52759157-a5b0-481a-9128-ee595e269af9" containerName="swift-ring-rebalance"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.579858 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="52759157-a5b0-481a-9128-ee595e269af9" containerName="swift-ring-rebalance"
Dec 05 06:11:19 crc kubenswrapper[4742]: E1205 06:11:19.579883 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97635213-f3d8-434d-9f8c-27427ff42464" containerName="ovn-config"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.579895 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="97635213-f3d8-434d-9f8c-27427ff42464" containerName="ovn-config"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.580174 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="97635213-f3d8-434d-9f8c-27427ff42464" containerName="ovn-config"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.580216 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="52759157-a5b0-481a-9128-ee595e269af9" containerName="swift-ring-rebalance"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.581511 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.586868 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.598184 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-r2rbf"]
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.761130 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.761219 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2p7s\" (UniqueName: \"kubernetes.io/projected/1ca5c510-f473-4db9-8cb0-de955202c98b-kube-api-access-c2p7s\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.761274 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.761348 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.761461 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.761809 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-config\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.862977 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.863035 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2p7s\" (UniqueName: \"kubernetes.io/projected/1ca5c510-f473-4db9-8cb0-de955202c98b-kube-api-access-c2p7s\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.863084 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.863135 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.863206 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.863261 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-config\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.864151 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-config\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.864847 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.864159 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.864922 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.865017 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.899270 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2p7s\" (UniqueName: \"kubernetes.io/projected/1ca5c510-f473-4db9-8cb0-de955202c98b-kube-api-access-c2p7s\") pod \"dnsmasq-dns-5c79d794d7-r2rbf\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:19 crc kubenswrapper[4742]: I1205 06:11:19.917773 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:20 crc kubenswrapper[4742]: I1205 06:11:20.410239 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-r2rbf"]
Dec 05 06:11:20 crc kubenswrapper[4742]: I1205 06:11:20.596119 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-f8hfs"
Dec 05 06:11:20 crc kubenswrapper[4742]: I1205 06:11:20.680415 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-db-sync-config-data\") pod \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") "
Dec 05 06:11:20 crc kubenswrapper[4742]: I1205 06:11:20.680501 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-combined-ca-bundle\") pod \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") "
Dec 05 06:11:20 crc kubenswrapper[4742]: I1205 06:11:20.680647 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-config-data\") pod \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") "
Dec 05 06:11:20 crc kubenswrapper[4742]: I1205 06:11:20.681084 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9258\" (UniqueName: \"kubernetes.io/projected/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-kube-api-access-k9258\") pod \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\" (UID: \"c7413df9-1f00-41d2-8ef1-e85a83fd6eac\") "
Dec 05 06:11:20 crc kubenswrapper[4742]: I1205 06:11:20.684702 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c7413df9-1f00-41d2-8ef1-e85a83fd6eac" (UID: "c7413df9-1f00-41d2-8ef1-e85a83fd6eac"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:11:20 crc kubenswrapper[4742]: I1205 06:11:20.684919 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-kube-api-access-k9258" (OuterVolumeSpecName: "kube-api-access-k9258") pod "c7413df9-1f00-41d2-8ef1-e85a83fd6eac" (UID: "c7413df9-1f00-41d2-8ef1-e85a83fd6eac"). InnerVolumeSpecName "kube-api-access-k9258". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:11:20 crc kubenswrapper[4742]: I1205 06:11:20.724632 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c7413df9-1f00-41d2-8ef1-e85a83fd6eac" (UID: "c7413df9-1f00-41d2-8ef1-e85a83fd6eac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:11:20 crc kubenswrapper[4742]: I1205 06:11:20.726010 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-config-data" (OuterVolumeSpecName: "config-data") pod "c7413df9-1f00-41d2-8ef1-e85a83fd6eac" (UID: "c7413df9-1f00-41d2-8ef1-e85a83fd6eac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:11:20 crc kubenswrapper[4742]: I1205 06:11:20.782975 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9258\" (UniqueName: \"kubernetes.io/projected/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-kube-api-access-k9258\") on node \"crc\" DevicePath \"\""
Dec 05 06:11:20 crc kubenswrapper[4742]: I1205 06:11:20.783049 4742 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 06:11:20 crc kubenswrapper[4742]: I1205 06:11:20.783103 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 06:11:20 crc kubenswrapper[4742]: I1205 06:11:20.783119 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7413df9-1f00-41d2-8ef1-e85a83fd6eac-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.279754 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-f8hfs" event={"ID":"c7413df9-1f00-41d2-8ef1-e85a83fd6eac","Type":"ContainerDied","Data":"baa8b224d27c0b5cb58d9d2b7e21399d7177d64b1ac8d63eae15aae506603feb"}
Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.280476 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="baa8b224d27c0b5cb58d9d2b7e21399d7177d64b1ac8d63eae15aae506603feb"
Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.279827 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-f8hfs"
Need to start a new one" pod="openstack/glance-db-sync-f8hfs" Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.283906 4742 generic.go:334] "Generic (PLEG): container finished" podID="1ca5c510-f473-4db9-8cb0-de955202c98b" containerID="195bcf9bca256d5b98ca493c344d79d31f31775c9bae521c0729d8c6d99f69c8" exitCode=0 Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.283967 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf" event={"ID":"1ca5c510-f473-4db9-8cb0-de955202c98b","Type":"ContainerDied","Data":"195bcf9bca256d5b98ca493c344d79d31f31775c9bae521c0729d8c6d99f69c8"} Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.284006 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf" event={"ID":"1ca5c510-f473-4db9-8cb0-de955202c98b","Type":"ContainerStarted","Data":"3ffeed4b44580cb70c29dd8b80d1029512a88adb9c8c50cc280ae970b271762e"} Dec 05 06:11:21 crc kubenswrapper[4742]: E1205 06:11:21.706728 4742 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7413df9_1f00_41d2_8ef1_e85a83fd6eac.slice/crio-baa8b224d27c0b5cb58d9d2b7e21399d7177d64b1ac8d63eae15aae506603feb\": RecentStats: unable to find data in memory cache]" Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.757791 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-r2rbf"] Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.770885 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-mlw4h"] Dec 05 06:11:21 crc kubenswrapper[4742]: E1205 06:11:21.771331 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7413df9-1f00-41d2-8ef1-e85a83fd6eac" containerName="glance-db-sync" Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.771347 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7413df9-1f00-41d2-8ef1-e85a83fd6eac" containerName="glance-db-sync" Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.771529 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7413df9-1f00-41d2-8ef1-e85a83fd6eac" containerName="glance-db-sync" Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.772412 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.778711 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-mlw4h"] Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.849188 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.913144 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.913218 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsw9v\" (UniqueName: \"kubernetes.io/projected/9979f7f7-778c-47f3-8263-d3d93753e714-kube-api-access-lsw9v\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.913243 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-config\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.913468 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.913572 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:21 crc kubenswrapper[4742]: I1205 06:11:21.913677 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.014972 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.015038 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: 
\"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.015087 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsw9v\" (UniqueName: \"kubernetes.io/projected/9979f7f7-778c-47f3-8263-d3d93753e714-kube-api-access-lsw9v\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.015106 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-config\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.015218 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.015275 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.015865 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.016383 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.016386 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.016635 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-config\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.017083 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:22 crc 
kubenswrapper[4742]: I1205 06:11:22.055162 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsw9v\" (UniqueName: \"kubernetes.io/projected/9979f7f7-778c-47f3-8263-d3d93753e714-kube-api-access-lsw9v\") pod \"dnsmasq-dns-5f59b8f679-mlw4h\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.090332 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-vjmxb"] Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.091894 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-vjmxb" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.095596 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.113333 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-vjmxb"] Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.182278 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.218673 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-ncssk"] Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.219756 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-ncssk" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.221278 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e3a818a-cec4-45bb-9da7-5f26059045a8-operator-scripts\") pod \"cinder-db-create-vjmxb\" (UID: \"4e3a818a-cec4-45bb-9da7-5f26059045a8\") " pod="openstack/cinder-db-create-vjmxb" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.221322 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxjlh\" (UniqueName: \"kubernetes.io/projected/4e3a818a-cec4-45bb-9da7-5f26059045a8-kube-api-access-zxjlh\") pod \"cinder-db-create-vjmxb\" (UID: \"4e3a818a-cec4-45bb-9da7-5f26059045a8\") " pod="openstack/cinder-db-create-vjmxb" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.239946 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-edde-account-create-update-nwbxj"] Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.241781 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-edde-account-create-update-nwbxj" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.246706 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.258113 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-edde-account-create-update-nwbxj"] Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.280035 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-ncssk"] Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.310422 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf" event={"ID":"1ca5c510-f473-4db9-8cb0-de955202c98b","Type":"ContainerStarted","Data":"e793a240be40e34f0104a265b9bcd8bcc6c7dd200ec1a74efab5189561a3a54f"} Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.310565 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf" podUID="1ca5c510-f473-4db9-8cb0-de955202c98b" containerName="dnsmasq-dns" containerID="cri-o://e793a240be40e34f0104a265b9bcd8bcc6c7dd200ec1a74efab5189561a3a54f" gracePeriod=10 Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.310673 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.322782 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad-operator-scripts\") pod \"cinder-edde-account-create-update-nwbxj\" (UID: \"4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad\") " pod="openstack/cinder-edde-account-create-update-nwbxj" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.322878 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e3a818a-cec4-45bb-9da7-5f26059045a8-operator-scripts\") pod \"cinder-db-create-vjmxb\" (UID: \"4e3a818a-cec4-45bb-9da7-5f26059045a8\") " pod="openstack/cinder-db-create-vjmxb" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.322908 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lr6t4\" (UniqueName: \"kubernetes.io/projected/4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad-kube-api-access-lr6t4\") pod \"cinder-edde-account-create-update-nwbxj\" (UID: \"4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad\") " pod="openstack/cinder-edde-account-create-update-nwbxj" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.322931 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxjlh\" (UniqueName: \"kubernetes.io/projected/4e3a818a-cec4-45bb-9da7-5f26059045a8-kube-api-access-zxjlh\") pod \"cinder-db-create-vjmxb\" (UID: \"4e3a818a-cec4-45bb-9da7-5f26059045a8\") " pod="openstack/cinder-db-create-vjmxb" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.322972 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmpl8\" (UniqueName: \"kubernetes.io/projected/606e80d6-e92c-4f5d-9806-33e538679939-kube-api-access-wmpl8\") pod \"barbican-db-create-ncssk\" (UID: \"606e80d6-e92c-4f5d-9806-33e538679939\") " pod="openstack/barbican-db-create-ncssk" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.323003 
4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/606e80d6-e92c-4f5d-9806-33e538679939-operator-scripts\") pod \"barbican-db-create-ncssk\" (UID: \"606e80d6-e92c-4f5d-9806-33e538679939\") " pod="openstack/barbican-db-create-ncssk" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.324909 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e3a818a-cec4-45bb-9da7-5f26059045a8-operator-scripts\") pod \"cinder-db-create-vjmxb\" (UID: \"4e3a818a-cec4-45bb-9da7-5f26059045a8\") " pod="openstack/cinder-db-create-vjmxb" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.338305 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-f9da-account-create-update-zhzzh"] Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.339337 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f9da-account-create-update-zhzzh" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.340326 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxjlh\" (UniqueName: \"kubernetes.io/projected/4e3a818a-cec4-45bb-9da7-5f26059045a8-kube-api-access-zxjlh\") pod \"cinder-db-create-vjmxb\" (UID: \"4e3a818a-cec4-45bb-9da7-5f26059045a8\") " pod="openstack/cinder-db-create-vjmxb" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.341101 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.368033 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-f9da-account-create-update-zhzzh"] Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.376864 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf" podStartSLOduration=3.376848094 podStartE2EDuration="3.376848094s" podCreationTimestamp="2025-12-05 06:11:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:11:22.332409164 +0000 UTC m=+1158.244544236" watchObservedRunningTime="2025-12-05 06:11:22.376848094 +0000 UTC m=+1158.288983146" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.406997 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-vjmxb" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.427884 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93dd367f-1105-4615-9562-2d9ad648e7a9-operator-scripts\") pod \"barbican-f9da-account-create-update-zhzzh\" (UID: \"93dd367f-1105-4615-9562-2d9ad648e7a9\") " pod="openstack/barbican-f9da-account-create-update-zhzzh" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.427925 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad-operator-scripts\") pod \"cinder-edde-account-create-update-nwbxj\" (UID: \"4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad\") " pod="openstack/cinder-edde-account-create-update-nwbxj" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.427982 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lr6t4\" (UniqueName: \"kubernetes.io/projected/4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad-kube-api-access-lr6t4\") pod \"cinder-edde-account-create-update-nwbxj\" (UID: \"4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad\") " pod="openstack/cinder-edde-account-create-update-nwbxj" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.428015 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hlmx\" (UniqueName: \"kubernetes.io/projected/93dd367f-1105-4615-9562-2d9ad648e7a9-kube-api-access-4hlmx\") pod \"barbican-f9da-account-create-update-zhzzh\" (UID: \"93dd367f-1105-4615-9562-2d9ad648e7a9\") " pod="openstack/barbican-f9da-account-create-update-zhzzh" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.428038 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmpl8\" (UniqueName: \"kubernetes.io/projected/606e80d6-e92c-4f5d-9806-33e538679939-kube-api-access-wmpl8\") pod \"barbican-db-create-ncssk\" (UID: \"606e80d6-e92c-4f5d-9806-33e538679939\") " pod="openstack/barbican-db-create-ncssk" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.428086 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/606e80d6-e92c-4f5d-9806-33e538679939-operator-scripts\") pod \"barbican-db-create-ncssk\" (UID: \"606e80d6-e92c-4f5d-9806-33e538679939\") " pod="openstack/barbican-db-create-ncssk" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.428659 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/606e80d6-e92c-4f5d-9806-33e538679939-operator-scripts\") pod \"barbican-db-create-ncssk\" (UID: \"606e80d6-e92c-4f5d-9806-33e538679939\") " pod="openstack/barbican-db-create-ncssk" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.429180 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad-operator-scripts\") pod \"cinder-edde-account-create-update-nwbxj\" (UID: \"4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad\") " pod="openstack/cinder-edde-account-create-update-nwbxj" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.499508 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-pfkmv"] Dec 05 06:11:22 crc kubenswrapper[4742]: 
I1205 06:11:22.501219 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-pfkmv" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.502310 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmpl8\" (UniqueName: \"kubernetes.io/projected/606e80d6-e92c-4f5d-9806-33e538679939-kube-api-access-wmpl8\") pod \"barbican-db-create-ncssk\" (UID: \"606e80d6-e92c-4f5d-9806-33e538679939\") " pod="openstack/barbican-db-create-ncssk" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.508241 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.508638 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.508655 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.509716 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-dbvnt" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.519592 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-pfkmv"] Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.532673 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93dd367f-1105-4615-9562-2d9ad648e7a9-operator-scripts\") pod \"barbican-f9da-account-create-update-zhzzh\" (UID: \"93dd367f-1105-4615-9562-2d9ad648e7a9\") " pod="openstack/barbican-f9da-account-create-update-zhzzh" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.533146 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hlmx\" (UniqueName: \"kubernetes.io/projected/93dd367f-1105-4615-9562-2d9ad648e7a9-kube-api-access-4hlmx\") pod \"barbican-f9da-account-create-update-zhzzh\" (UID: \"93dd367f-1105-4615-9562-2d9ad648e7a9\") " pod="openstack/barbican-f9da-account-create-update-zhzzh" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.533248 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lr6t4\" (UniqueName: \"kubernetes.io/projected/4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad-kube-api-access-lr6t4\") pod \"cinder-edde-account-create-update-nwbxj\" (UID: \"4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad\") " pod="openstack/cinder-edde-account-create-update-nwbxj" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.533585 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93dd367f-1105-4615-9562-2d9ad648e7a9-operator-scripts\") pod \"barbican-f9da-account-create-update-zhzzh\" (UID: \"93dd367f-1105-4615-9562-2d9ad648e7a9\") " pod="openstack/barbican-f9da-account-create-update-zhzzh" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.535159 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-2c8pq"] Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.538718 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-2c8pq" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.547485 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-ncssk" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.563386 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-2c8pq"] Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.565472 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hlmx\" (UniqueName: \"kubernetes.io/projected/93dd367f-1105-4615-9562-2d9ad648e7a9-kube-api-access-4hlmx\") pod \"barbican-f9da-account-create-update-zhzzh\" (UID: \"93dd367f-1105-4615-9562-2d9ad648e7a9\") " pod="openstack/barbican-f9da-account-create-update-zhzzh" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.580192 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-edde-account-create-update-nwbxj" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.631763 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5be2-account-create-update-tntgt"] Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.632856 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5be2-account-create-update-tntgt" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.634207 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e75b039-76da-40c8-a486-0f310cafa125-combined-ca-bundle\") pod \"keystone-db-sync-pfkmv\" (UID: \"7e75b039-76da-40c8-a486-0f310cafa125\") " pod="openstack/keystone-db-sync-pfkmv" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.634238 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0-operator-scripts\") pod \"neutron-db-create-2c8pq\" (UID: \"1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0\") " pod="openstack/neutron-db-create-2c8pq" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.634272 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfjnd\" (UniqueName: \"kubernetes.io/projected/7e75b039-76da-40c8-a486-0f310cafa125-kube-api-access-xfjnd\") pod \"keystone-db-sync-pfkmv\" (UID: \"7e75b039-76da-40c8-a486-0f310cafa125\") " pod="openstack/keystone-db-sync-pfkmv" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.634291 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt26s\" (UniqueName: \"kubernetes.io/projected/1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0-kube-api-access-tt26s\") pod \"neutron-db-create-2c8pq\" (UID: \"1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0\") " pod="openstack/neutron-db-create-2c8pq" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.634522 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e75b039-76da-40c8-a486-0f310cafa125-config-data\") pod \"keystone-db-sync-pfkmv\" (UID: \"7e75b039-76da-40c8-a486-0f310cafa125\") " pod="openstack/keystone-db-sync-pfkmv" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.635118 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.639388 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/neutron-5be2-account-create-update-tntgt"] Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.736427 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znkm6\" (UniqueName: \"kubernetes.io/projected/49b98e00-ff84-46a9-b808-262b1246348c-kube-api-access-znkm6\") pod \"neutron-5be2-account-create-update-tntgt\" (UID: \"49b98e00-ff84-46a9-b808-262b1246348c\") " pod="openstack/neutron-5be2-account-create-update-tntgt" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.736879 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e75b039-76da-40c8-a486-0f310cafa125-combined-ca-bundle\") pod \"keystone-db-sync-pfkmv\" (UID: \"7e75b039-76da-40c8-a486-0f310cafa125\") " pod="openstack/keystone-db-sync-pfkmv" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.736909 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0-operator-scripts\") pod \"neutron-db-create-2c8pq\" (UID: \"1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0\") " pod="openstack/neutron-db-create-2c8pq" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.736943 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49b98e00-ff84-46a9-b808-262b1246348c-operator-scripts\") pod \"neutron-5be2-account-create-update-tntgt\" (UID: \"49b98e00-ff84-46a9-b808-262b1246348c\") " pod="openstack/neutron-5be2-account-create-update-tntgt" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.736980 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfjnd\" (UniqueName: \"kubernetes.io/projected/7e75b039-76da-40c8-a486-0f310cafa125-kube-api-access-xfjnd\") pod \"keystone-db-sync-pfkmv\" (UID: \"7e75b039-76da-40c8-a486-0f310cafa125\") " pod="openstack/keystone-db-sync-pfkmv" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.737015 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt26s\" (UniqueName: \"kubernetes.io/projected/1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0-kube-api-access-tt26s\") pod \"neutron-db-create-2c8pq\" (UID: \"1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0\") " pod="openstack/neutron-db-create-2c8pq" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.737088 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e75b039-76da-40c8-a486-0f310cafa125-config-data\") pod \"keystone-db-sync-pfkmv\" (UID: \"7e75b039-76da-40c8-a486-0f310cafa125\") " pod="openstack/keystone-db-sync-pfkmv" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.737944 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0-operator-scripts\") pod \"neutron-db-create-2c8pq\" (UID: \"1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0\") " pod="openstack/neutron-db-create-2c8pq" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.742606 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e75b039-76da-40c8-a486-0f310cafa125-config-data\") pod \"keystone-db-sync-pfkmv\" (UID: \"7e75b039-76da-40c8-a486-0f310cafa125\") " 
pod="openstack/keystone-db-sync-pfkmv" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.742692 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e75b039-76da-40c8-a486-0f310cafa125-combined-ca-bundle\") pod \"keystone-db-sync-pfkmv\" (UID: \"7e75b039-76da-40c8-a486-0f310cafa125\") " pod="openstack/keystone-db-sync-pfkmv" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.752405 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfjnd\" (UniqueName: \"kubernetes.io/projected/7e75b039-76da-40c8-a486-0f310cafa125-kube-api-access-xfjnd\") pod \"keystone-db-sync-pfkmv\" (UID: \"7e75b039-76da-40c8-a486-0f310cafa125\") " pod="openstack/keystone-db-sync-pfkmv" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.752908 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tt26s\" (UniqueName: \"kubernetes.io/projected/1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0-kube-api-access-tt26s\") pod \"neutron-db-create-2c8pq\" (UID: \"1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0\") " pod="openstack/neutron-db-create-2c8pq" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.794575 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-mlw4h"] Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.810898 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f9da-account-create-update-zhzzh" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.834663 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-pfkmv" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.838267 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49b98e00-ff84-46a9-b808-262b1246348c-operator-scripts\") pod \"neutron-5be2-account-create-update-tntgt\" (UID: \"49b98e00-ff84-46a9-b808-262b1246348c\") " pod="openstack/neutron-5be2-account-create-update-tntgt" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.838358 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znkm6\" (UniqueName: \"kubernetes.io/projected/49b98e00-ff84-46a9-b808-262b1246348c-kube-api-access-znkm6\") pod \"neutron-5be2-account-create-update-tntgt\" (UID: \"49b98e00-ff84-46a9-b808-262b1246348c\") " pod="openstack/neutron-5be2-account-create-update-tntgt" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.839224 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49b98e00-ff84-46a9-b808-262b1246348c-operator-scripts\") pod \"neutron-5be2-account-create-update-tntgt\" (UID: \"49b98e00-ff84-46a9-b808-262b1246348c\") " pod="openstack/neutron-5be2-account-create-update-tntgt" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.859575 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znkm6\" (UniqueName: \"kubernetes.io/projected/49b98e00-ff84-46a9-b808-262b1246348c-kube-api-access-znkm6\") pod \"neutron-5be2-account-create-update-tntgt\" (UID: \"49b98e00-ff84-46a9-b808-262b1246348c\") " pod="openstack/neutron-5be2-account-create-update-tntgt" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.868511 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.885972 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-2c8pq" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.939377 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2p7s\" (UniqueName: \"kubernetes.io/projected/1ca5c510-f473-4db9-8cb0-de955202c98b-kube-api-access-c2p7s\") pod \"1ca5c510-f473-4db9-8cb0-de955202c98b\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.939436 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-ovsdbserver-sb\") pod \"1ca5c510-f473-4db9-8cb0-de955202c98b\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.939475 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-ovsdbserver-nb\") pod \"1ca5c510-f473-4db9-8cb0-de955202c98b\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.939528 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-dns-swift-storage-0\") pod \"1ca5c510-f473-4db9-8cb0-de955202c98b\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.939615 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-config\") pod \"1ca5c510-f473-4db9-8cb0-de955202c98b\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.939661 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-dns-svc\") pod \"1ca5c510-f473-4db9-8cb0-de955202c98b\" (UID: \"1ca5c510-f473-4db9-8cb0-de955202c98b\") " Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.957219 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ca5c510-f473-4db9-8cb0-de955202c98b-kube-api-access-c2p7s" (OuterVolumeSpecName: "kube-api-access-c2p7s") pod "1ca5c510-f473-4db9-8cb0-de955202c98b" (UID: "1ca5c510-f473-4db9-8cb0-de955202c98b"). InnerVolumeSpecName "kube-api-access-c2p7s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:11:22 crc kubenswrapper[4742]: I1205 06:11:22.959611 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5be2-account-create-update-tntgt" Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.002716 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1ca5c510-f473-4db9-8cb0-de955202c98b" (UID: "1ca5c510-f473-4db9-8cb0-de955202c98b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.011566 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1ca5c510-f473-4db9-8cb0-de955202c98b" (UID: "1ca5c510-f473-4db9-8cb0-de955202c98b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.039413 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1ca5c510-f473-4db9-8cb0-de955202c98b" (UID: "1ca5c510-f473-4db9-8cb0-de955202c98b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.041039 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.041086 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2p7s\" (UniqueName: \"kubernetes.io/projected/1ca5c510-f473-4db9-8cb0-de955202c98b-kube-api-access-c2p7s\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.041105 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.041117 4742 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.042632 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1ca5c510-f473-4db9-8cb0-de955202c98b" (UID: "1ca5c510-f473-4db9-8cb0-de955202c98b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.044441 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-config" (OuterVolumeSpecName: "config") pod "1ca5c510-f473-4db9-8cb0-de955202c98b" (UID: "1ca5c510-f473-4db9-8cb0-de955202c98b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.054037 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-vjmxb"] Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.143592 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.143615 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1ca5c510-f473-4db9-8cb0-de955202c98b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.210306 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-edde-account-create-update-nwbxj"] Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.246578 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-ncssk"] Dec 05 06:11:23 crc kubenswrapper[4742]: W1205 06:11:23.249974 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod606e80d6_e92c_4f5d_9806_33e538679939.slice/crio-59dba5ad31cd5abbad73acfcebd1732e082aee9374707fa0e1dd43cfb7d09c1a WatchSource:0}: Error finding container 59dba5ad31cd5abbad73acfcebd1732e082aee9374707fa0e1dd43cfb7d09c1a: Status 404 returned error can't find the container with id 59dba5ad31cd5abbad73acfcebd1732e082aee9374707fa0e1dd43cfb7d09c1a Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.329362 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-2c8pq"] Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.338624 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-ncssk" event={"ID":"606e80d6-e92c-4f5d-9806-33e538679939","Type":"ContainerStarted","Data":"59dba5ad31cd5abbad73acfcebd1732e082aee9374707fa0e1dd43cfb7d09c1a"} Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.339600 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-edde-account-create-update-nwbxj" event={"ID":"4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad","Type":"ContainerStarted","Data":"11730a4a7d8bb9901465f95d07905fbb1afad07fb73bba57ef162a171d0b058b"} Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.340619 4742 generic.go:334] "Generic (PLEG): container finished" podID="9979f7f7-778c-47f3-8263-d3d93753e714" containerID="ebc99ef0e07f3df716b11ee744309c678a42c8f8c434aa81b990d99b4d657ada" exitCode=0 Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.341294 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" event={"ID":"9979f7f7-778c-47f3-8263-d3d93753e714","Type":"ContainerDied","Data":"ebc99ef0e07f3df716b11ee744309c678a42c8f8c434aa81b990d99b4d657ada"} Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.341343 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" event={"ID":"9979f7f7-778c-47f3-8263-d3d93753e714","Type":"ContainerStarted","Data":"a6a15ef699febe4ef315506f1e0c57f62da9176bd3f997cd38dfbe031a49d873"} Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.349473 4742 generic.go:334] "Generic (PLEG): container finished" podID="1ca5c510-f473-4db9-8cb0-de955202c98b" containerID="e793a240be40e34f0104a265b9bcd8bcc6c7dd200ec1a74efab5189561a3a54f" exitCode=0 Dec 05 
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.349557 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf" event={"ID":"1ca5c510-f473-4db9-8cb0-de955202c98b","Type":"ContainerDied","Data":"e793a240be40e34f0104a265b9bcd8bcc6c7dd200ec1a74efab5189561a3a54f"}
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.349588 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf" event={"ID":"1ca5c510-f473-4db9-8cb0-de955202c98b","Type":"ContainerDied","Data":"3ffeed4b44580cb70c29dd8b80d1029512a88adb9c8c50cc280ae970b271762e"}
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.349608 4742 scope.go:117] "RemoveContainer" containerID="e793a240be40e34f0104a265b9bcd8bcc6c7dd200ec1a74efab5189561a3a54f"
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.349733 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-r2rbf"
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.354969 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-vjmxb" event={"ID":"4e3a818a-cec4-45bb-9da7-5f26059045a8","Type":"ContainerStarted","Data":"d7d412bfdbf18e19227bb85277d4f705e94c92294be6f76ddab3aceec1f763e7"}
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.392985 4742 scope.go:117] "RemoveContainer" containerID="195bcf9bca256d5b98ca493c344d79d31f31775c9bae521c0729d8c6d99f69c8"
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.424549 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-r2rbf"]
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.434159 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-f9da-account-create-update-zhzzh"]
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.442604 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-r2rbf"]
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.449783 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-pfkmv"]
Dec 05 06:11:23 crc kubenswrapper[4742]: W1205 06:11:23.469160 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e75b039_76da_40c8_a486_0f310cafa125.slice/crio-eaa5865160ca6929389024d6f1a2c7547c3f6fe9c914ecea31d711b8cb0d3ab4 WatchSource:0}: Error finding container eaa5865160ca6929389024d6f1a2c7547c3f6fe9c914ecea31d711b8cb0d3ab4: Status 404 returned error can't find the container with id eaa5865160ca6929389024d6f1a2c7547c3f6fe9c914ecea31d711b8cb0d3ab4
Dec 05 06:11:23 crc kubenswrapper[4742]: W1205 06:11:23.484175 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod93dd367f_1105_4615_9562_2d9ad648e7a9.slice/crio-06375657fd08efbc85c80e359192704f98c4340bfd627655779bde9ea393ba9c WatchSource:0}: Error finding container 06375657fd08efbc85c80e359192704f98c4340bfd627655779bde9ea393ba9c: Status 404 returned error can't find the container with id 06375657fd08efbc85c80e359192704f98c4340bfd627655779bde9ea393ba9c
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.491745 4742 scope.go:117] "RemoveContainer" containerID="e793a240be40e34f0104a265b9bcd8bcc6c7dd200ec1a74efab5189561a3a54f"
Dec 05 06:11:23 crc kubenswrapper[4742]: E1205 06:11:23.492483 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e793a240be40e34f0104a265b9bcd8bcc6c7dd200ec1a74efab5189561a3a54f\": container with ID starting with e793a240be40e34f0104a265b9bcd8bcc6c7dd200ec1a74efab5189561a3a54f not found: ID does not exist" containerID="e793a240be40e34f0104a265b9bcd8bcc6c7dd200ec1a74efab5189561a3a54f"
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.492524 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e793a240be40e34f0104a265b9bcd8bcc6c7dd200ec1a74efab5189561a3a54f"} err="failed to get container status \"e793a240be40e34f0104a265b9bcd8bcc6c7dd200ec1a74efab5189561a3a54f\": rpc error: code = NotFound desc = could not find container \"e793a240be40e34f0104a265b9bcd8bcc6c7dd200ec1a74efab5189561a3a54f\": container with ID starting with e793a240be40e34f0104a265b9bcd8bcc6c7dd200ec1a74efab5189561a3a54f not found: ID does not exist"
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.492550 4742 scope.go:117] "RemoveContainer" containerID="195bcf9bca256d5b98ca493c344d79d31f31775c9bae521c0729d8c6d99f69c8"
Dec 05 06:11:23 crc kubenswrapper[4742]: E1205 06:11:23.494857 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"195bcf9bca256d5b98ca493c344d79d31f31775c9bae521c0729d8c6d99f69c8\": container with ID starting with 195bcf9bca256d5b98ca493c344d79d31f31775c9bae521c0729d8c6d99f69c8 not found: ID does not exist" containerID="195bcf9bca256d5b98ca493c344d79d31f31775c9bae521c0729d8c6d99f69c8"
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.494938 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"195bcf9bca256d5b98ca493c344d79d31f31775c9bae521c0729d8c6d99f69c8"} err="failed to get container status \"195bcf9bca256d5b98ca493c344d79d31f31775c9bae521c0729d8c6d99f69c8\": rpc error: code = NotFound desc = could not find container \"195bcf9bca256d5b98ca493c344d79d31f31775c9bae521c0729d8c6d99f69c8\": container with ID starting with 195bcf9bca256d5b98ca493c344d79d31f31775c9bae521c0729d8c6d99f69c8 not found: ID does not exist"
Dec 05 06:11:23 crc kubenswrapper[4742]: I1205 06:11:23.607541 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5be2-account-create-update-tntgt"]
Dec 05 06:11:23 crc kubenswrapper[4742]: W1205 06:11:23.617425 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49b98e00_ff84_46a9_b808_262b1246348c.slice/crio-ea9f8c5da414d82aaec62caf4279469be6e9ff404093b0e25c5f1f73391f2739 WatchSource:0}: Error finding container ea9f8c5da414d82aaec62caf4279469be6e9ff404093b0e25c5f1f73391f2739: Status 404 returned error can't find the container with id ea9f8c5da414d82aaec62caf4279469be6e9ff404093b0e25c5f1f73391f2739
Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.367823 4742 generic.go:334] "Generic (PLEG): container finished" podID="93dd367f-1105-4615-9562-2d9ad648e7a9" containerID="510a3781c536a45f9268a131d634df6000b7aa0790f8ecd41f3df0f245747f22" exitCode=0
Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.368201 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f9da-account-create-update-zhzzh" event={"ID":"93dd367f-1105-4615-9562-2d9ad648e7a9","Type":"ContainerDied","Data":"510a3781c536a45f9268a131d634df6000b7aa0790f8ecd41f3df0f245747f22"}
pod="openstack/barbican-f9da-account-create-update-zhzzh" event={"ID":"93dd367f-1105-4615-9562-2d9ad648e7a9","Type":"ContainerStarted","Data":"06375657fd08efbc85c80e359192704f98c4340bfd627655779bde9ea393ba9c"} Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.371292 4742 generic.go:334] "Generic (PLEG): container finished" podID="4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad" containerID="7e33ab88a84bfcd32ed36ac73c5cc6d18581c628f85c65c7a393f805d3496823" exitCode=0 Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.371379 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-edde-account-create-update-nwbxj" event={"ID":"4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad","Type":"ContainerDied","Data":"7e33ab88a84bfcd32ed36ac73c5cc6d18581c628f85c65c7a393f805d3496823"} Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.373369 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" event={"ID":"9979f7f7-778c-47f3-8263-d3d93753e714","Type":"ContainerStarted","Data":"36120690a82b6f681780c91a20a3cc8a0a413e14252b3db344b7e688541ec8f6"} Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.374470 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.376728 4742 generic.go:334] "Generic (PLEG): container finished" podID="49b98e00-ff84-46a9-b808-262b1246348c" containerID="cf30eeea73b225196dce65f05adf26d5dd173c94ed9ebc9ce44cfa0d14ed9410" exitCode=0 Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.376936 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5be2-account-create-update-tntgt" event={"ID":"49b98e00-ff84-46a9-b808-262b1246348c","Type":"ContainerDied","Data":"cf30eeea73b225196dce65f05adf26d5dd173c94ed9ebc9ce44cfa0d14ed9410"} Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.377521 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5be2-account-create-update-tntgt" event={"ID":"49b98e00-ff84-46a9-b808-262b1246348c","Type":"ContainerStarted","Data":"ea9f8c5da414d82aaec62caf4279469be6e9ff404093b0e25c5f1f73391f2739"} Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.378812 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-pfkmv" event={"ID":"7e75b039-76da-40c8-a486-0f310cafa125","Type":"ContainerStarted","Data":"eaa5865160ca6929389024d6f1a2c7547c3f6fe9c914ecea31d711b8cb0d3ab4"} Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.391583 4742 generic.go:334] "Generic (PLEG): container finished" podID="1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0" containerID="b11b94d0ec61d166b0c11832fa3e1bd3d6d14ce98de189313ff3cf8c052111ac" exitCode=0 Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.410796 4742 generic.go:334] "Generic (PLEG): container finished" podID="4e3a818a-cec4-45bb-9da7-5f26059045a8" containerID="a67a9bcba49907c071cc13e63399467b1596cfba2f9e45b6f818f93406035ff6" exitCode=0 Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.417878 4742 generic.go:334] "Generic (PLEG): container finished" podID="606e80d6-e92c-4f5d-9806-33e538679939" containerID="cf62b4048920e6f2c6c7dd02a335730388cd54bf021450a140d37df0f8600700" exitCode=0 Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.421650 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ca5c510-f473-4db9-8cb0-de955202c98b" path="/var/lib/kubelet/pods/1ca5c510-f473-4db9-8cb0-de955202c98b/volumes" Dec 05 06:11:24 crc 
Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.426319 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-2c8pq" event={"ID":"1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0","Type":"ContainerDied","Data":"b11b94d0ec61d166b0c11832fa3e1bd3d6d14ce98de189313ff3cf8c052111ac"}
Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.426352 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-2c8pq" event={"ID":"1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0","Type":"ContainerStarted","Data":"1573207ebae119dea62f9814dc5c4e6dbf076df8617089ef53c29729ef871f58"}
Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.426365 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-vjmxb" event={"ID":"4e3a818a-cec4-45bb-9da7-5f26059045a8","Type":"ContainerDied","Data":"a67a9bcba49907c071cc13e63399467b1596cfba2f9e45b6f818f93406035ff6"}
Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.426384 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-ncssk" event={"ID":"606e80d6-e92c-4f5d-9806-33e538679939","Type":"ContainerDied","Data":"cf62b4048920e6f2c6c7dd02a335730388cd54bf021450a140d37df0f8600700"}
Dec 05 06:11:24 crc kubenswrapper[4742]: I1205 06:11:24.495658 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" podStartSLOduration=3.49563496 podStartE2EDuration="3.49563496s" podCreationTimestamp="2025-12-05 06:11:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:11:24.490861013 +0000 UTC m=+1160.402996075" watchObservedRunningTime="2025-12-05 06:11:24.49563496 +0000 UTC m=+1160.407770032"
Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.229692 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5be2-account-create-update-tntgt"
Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.243548 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-edde-account-create-update-nwbxj"
Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.249303 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-2c8pq"
Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.265368 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-ncssk"
Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.282644 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-vjmxb"
Need to start a new one" pod="openstack/barbican-f9da-account-create-update-zhzzh" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.391449 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93dd367f-1105-4615-9562-2d9ad648e7a9-operator-scripts\") pod \"93dd367f-1105-4615-9562-2d9ad648e7a9\" (UID: \"93dd367f-1105-4615-9562-2d9ad648e7a9\") " Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.391533 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0-operator-scripts\") pod \"1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0\" (UID: \"1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0\") " Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.391578 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/606e80d6-e92c-4f5d-9806-33e538679939-operator-scripts\") pod \"606e80d6-e92c-4f5d-9806-33e538679939\" (UID: \"606e80d6-e92c-4f5d-9806-33e538679939\") " Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.391615 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad-operator-scripts\") pod \"4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad\" (UID: \"4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad\") " Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.392232 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0" (UID: "1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.392670 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93dd367f-1105-4615-9562-2d9ad648e7a9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "93dd367f-1105-4615-9562-2d9ad648e7a9" (UID: "93dd367f-1105-4615-9562-2d9ad648e7a9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.393088 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/606e80d6-e92c-4f5d-9806-33e538679939-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "606e80d6-e92c-4f5d-9806-33e538679939" (UID: "606e80d6-e92c-4f5d-9806-33e538679939"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.393692 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad" (UID: "4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.391680 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lr6t4\" (UniqueName: \"kubernetes.io/projected/4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad-kube-api-access-lr6t4\") pod \"4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad\" (UID: \"4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad\") " Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.394651 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxjlh\" (UniqueName: \"kubernetes.io/projected/4e3a818a-cec4-45bb-9da7-5f26059045a8-kube-api-access-zxjlh\") pod \"4e3a818a-cec4-45bb-9da7-5f26059045a8\" (UID: \"4e3a818a-cec4-45bb-9da7-5f26059045a8\") " Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.394681 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hlmx\" (UniqueName: \"kubernetes.io/projected/93dd367f-1105-4615-9562-2d9ad648e7a9-kube-api-access-4hlmx\") pod \"93dd367f-1105-4615-9562-2d9ad648e7a9\" (UID: \"93dd367f-1105-4615-9562-2d9ad648e7a9\") " Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.394717 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49b98e00-ff84-46a9-b808-262b1246348c-operator-scripts\") pod \"49b98e00-ff84-46a9-b808-262b1246348c\" (UID: \"49b98e00-ff84-46a9-b808-262b1246348c\") " Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.394743 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmpl8\" (UniqueName: \"kubernetes.io/projected/606e80d6-e92c-4f5d-9806-33e538679939-kube-api-access-wmpl8\") pod \"606e80d6-e92c-4f5d-9806-33e538679939\" (UID: \"606e80d6-e92c-4f5d-9806-33e538679939\") " Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.394782 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e3a818a-cec4-45bb-9da7-5f26059045a8-operator-scripts\") pod \"4e3a818a-cec4-45bb-9da7-5f26059045a8\" (UID: \"4e3a818a-cec4-45bb-9da7-5f26059045a8\") " Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.394812 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-znkm6\" (UniqueName: \"kubernetes.io/projected/49b98e00-ff84-46a9-b808-262b1246348c-kube-api-access-znkm6\") pod \"49b98e00-ff84-46a9-b808-262b1246348c\" (UID: \"49b98e00-ff84-46a9-b808-262b1246348c\") " Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.394883 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tt26s\" (UniqueName: \"kubernetes.io/projected/1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0-kube-api-access-tt26s\") pod \"1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0\" (UID: \"1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0\") " Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.395603 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.395624 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/606e80d6-e92c-4f5d-9806-33e538679939-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:29 crc 
Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.395638 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.395649 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93dd367f-1105-4615-9562-2d9ad648e7a9-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.397245 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49b98e00-ff84-46a9-b808-262b1246348c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "49b98e00-ff84-46a9-b808-262b1246348c" (UID: "49b98e00-ff84-46a9-b808-262b1246348c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.397706 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e3a818a-cec4-45bb-9da7-5f26059045a8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4e3a818a-cec4-45bb-9da7-5f26059045a8" (UID: "4e3a818a-cec4-45bb-9da7-5f26059045a8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.400158 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/606e80d6-e92c-4f5d-9806-33e538679939-kube-api-access-wmpl8" (OuterVolumeSpecName: "kube-api-access-wmpl8") pod "606e80d6-e92c-4f5d-9806-33e538679939" (UID: "606e80d6-e92c-4f5d-9806-33e538679939"). InnerVolumeSpecName "kube-api-access-wmpl8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.401006 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad-kube-api-access-lr6t4" (OuterVolumeSpecName: "kube-api-access-lr6t4") pod "4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad" (UID: "4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad"). InnerVolumeSpecName "kube-api-access-lr6t4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.401850 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93dd367f-1105-4615-9562-2d9ad648e7a9-kube-api-access-4hlmx" (OuterVolumeSpecName: "kube-api-access-4hlmx") pod "93dd367f-1105-4615-9562-2d9ad648e7a9" (UID: "93dd367f-1105-4615-9562-2d9ad648e7a9"). InnerVolumeSpecName "kube-api-access-4hlmx". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.419298 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e3a818a-cec4-45bb-9da7-5f26059045a8-kube-api-access-zxjlh" (OuterVolumeSpecName: "kube-api-access-zxjlh") pod "4e3a818a-cec4-45bb-9da7-5f26059045a8" (UID: "4e3a818a-cec4-45bb-9da7-5f26059045a8"). InnerVolumeSpecName "kube-api-access-zxjlh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.427163 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0-kube-api-access-tt26s" (OuterVolumeSpecName: "kube-api-access-tt26s") pod "1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0" (UID: "1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0"). InnerVolumeSpecName "kube-api-access-tt26s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.458684 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5be2-account-create-update-tntgt" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.459142 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5be2-account-create-update-tntgt" event={"ID":"49b98e00-ff84-46a9-b808-262b1246348c","Type":"ContainerDied","Data":"ea9f8c5da414d82aaec62caf4279469be6e9ff404093b0e25c5f1f73391f2739"} Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.459212 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea9f8c5da414d82aaec62caf4279469be6e9ff404093b0e25c5f1f73391f2739" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.460762 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-pfkmv" event={"ID":"7e75b039-76da-40c8-a486-0f310cafa125","Type":"ContainerStarted","Data":"327958d28604e61b0f08263d448489011f061103aa9d6f3703a193a7c7d54c73"} Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.465006 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-vjmxb" event={"ID":"4e3a818a-cec4-45bb-9da7-5f26059045a8","Type":"ContainerDied","Data":"d7d412bfdbf18e19227bb85277d4f705e94c92294be6f76ddab3aceec1f763e7"} Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.465039 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7d412bfdbf18e19227bb85277d4f705e94c92294be6f76ddab3aceec1f763e7" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.465137 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-vjmxb" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.468876 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-ncssk" event={"ID":"606e80d6-e92c-4f5d-9806-33e538679939","Type":"ContainerDied","Data":"59dba5ad31cd5abbad73acfcebd1732e082aee9374707fa0e1dd43cfb7d09c1a"} Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.468917 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59dba5ad31cd5abbad73acfcebd1732e082aee9374707fa0e1dd43cfb7d09c1a" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.468978 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-ncssk" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.471419 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-edde-account-create-update-nwbxj" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.472001 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-edde-account-create-update-nwbxj" event={"ID":"4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad","Type":"ContainerDied","Data":"11730a4a7d8bb9901465f95d07905fbb1afad07fb73bba57ef162a171d0b058b"} Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.472038 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11730a4a7d8bb9901465f95d07905fbb1afad07fb73bba57ef162a171d0b058b" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.479038 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f9da-account-create-update-zhzzh" event={"ID":"93dd367f-1105-4615-9562-2d9ad648e7a9","Type":"ContainerDied","Data":"06375657fd08efbc85c80e359192704f98c4340bfd627655779bde9ea393ba9c"} Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.479097 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06375657fd08efbc85c80e359192704f98c4340bfd627655779bde9ea393ba9c" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.479116 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f9da-account-create-update-zhzzh" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.479122 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-pfkmv" podStartSLOduration=1.867134539 podStartE2EDuration="7.478468055s" podCreationTimestamp="2025-12-05 06:11:22 +0000 UTC" firstStartedPulling="2025-12-05 06:11:23.472868224 +0000 UTC m=+1159.385003286" lastFinishedPulling="2025-12-05 06:11:29.08420174 +0000 UTC m=+1164.996336802" observedRunningTime="2025-12-05 06:11:29.476378589 +0000 UTC m=+1165.388513651" watchObservedRunningTime="2025-12-05 06:11:29.478468055 +0000 UTC m=+1165.390603107" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.482117 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-2c8pq" event={"ID":"1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0","Type":"ContainerDied","Data":"1573207ebae119dea62f9814dc5c4e6dbf076df8617089ef53c29729ef871f58"} Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.482158 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1573207ebae119dea62f9814dc5c4e6dbf076df8617089ef53c29729ef871f58" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.482212 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-2c8pq" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.497084 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lr6t4\" (UniqueName: \"kubernetes.io/projected/4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad-kube-api-access-lr6t4\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.497114 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxjlh\" (UniqueName: \"kubernetes.io/projected/4e3a818a-cec4-45bb-9da7-5f26059045a8-kube-api-access-zxjlh\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.497127 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hlmx\" (UniqueName: \"kubernetes.io/projected/93dd367f-1105-4615-9562-2d9ad648e7a9-kube-api-access-4hlmx\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.497139 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49b98e00-ff84-46a9-b808-262b1246348c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.497150 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmpl8\" (UniqueName: \"kubernetes.io/projected/606e80d6-e92c-4f5d-9806-33e538679939-kube-api-access-wmpl8\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.497161 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4e3a818a-cec4-45bb-9da7-5f26059045a8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.497172 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-znkm6\" (UniqueName: \"kubernetes.io/projected/49b98e00-ff84-46a9-b808-262b1246348c-kube-api-access-znkm6\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:29 crc kubenswrapper[4742]: I1205 06:11:29.497183 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tt26s\" (UniqueName: \"kubernetes.io/projected/1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0-kube-api-access-tt26s\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.098271 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.219711 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6z8h9"] Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.220016 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" podUID="66961e66-3235-4bc8-995e-106a483d8724" containerName="dnsmasq-dns" containerID="cri-o://97fbad03797344f8e1cbf777f0992d8832647f1c01b87eb27839951ba561dd71" gracePeriod=10 Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.512031 4742 generic.go:334] "Generic (PLEG): container finished" podID="66961e66-3235-4bc8-995e-106a483d8724" containerID="97fbad03797344f8e1cbf777f0992d8832647f1c01b87eb27839951ba561dd71" exitCode=0 Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.512127 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" 
event={"ID":"66961e66-3235-4bc8-995e-106a483d8724","Type":"ContainerDied","Data":"97fbad03797344f8e1cbf777f0992d8832647f1c01b87eb27839951ba561dd71"} Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.513417 4742 generic.go:334] "Generic (PLEG): container finished" podID="7e75b039-76da-40c8-a486-0f310cafa125" containerID="327958d28604e61b0f08263d448489011f061103aa9d6f3703a193a7c7d54c73" exitCode=0 Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.513443 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-pfkmv" event={"ID":"7e75b039-76da-40c8-a486-0f310cafa125","Type":"ContainerDied","Data":"327958d28604e61b0f08263d448489011f061103aa9d6f3703a193a7c7d54c73"} Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.667316 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.758504 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-ovsdbserver-nb\") pod \"66961e66-3235-4bc8-995e-106a483d8724\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.758886 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-config\") pod \"66961e66-3235-4bc8-995e-106a483d8724\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.759011 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbknq\" (UniqueName: \"kubernetes.io/projected/66961e66-3235-4bc8-995e-106a483d8724-kube-api-access-wbknq\") pod \"66961e66-3235-4bc8-995e-106a483d8724\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.759181 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-ovsdbserver-sb\") pod \"66961e66-3235-4bc8-995e-106a483d8724\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.759328 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-dns-svc\") pod \"66961e66-3235-4bc8-995e-106a483d8724\" (UID: \"66961e66-3235-4bc8-995e-106a483d8724\") " Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.774085 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66961e66-3235-4bc8-995e-106a483d8724-kube-api-access-wbknq" (OuterVolumeSpecName: "kube-api-access-wbknq") pod "66961e66-3235-4bc8-995e-106a483d8724" (UID: "66961e66-3235-4bc8-995e-106a483d8724"). InnerVolumeSpecName "kube-api-access-wbknq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.795351 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "66961e66-3235-4bc8-995e-106a483d8724" (UID: "66961e66-3235-4bc8-995e-106a483d8724"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.801454 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "66961e66-3235-4bc8-995e-106a483d8724" (UID: "66961e66-3235-4bc8-995e-106a483d8724"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.803516 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-config" (OuterVolumeSpecName: "config") pod "66961e66-3235-4bc8-995e-106a483d8724" (UID: "66961e66-3235-4bc8-995e-106a483d8724"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.817261 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "66961e66-3235-4bc8-995e-106a483d8724" (UID: "66961e66-3235-4bc8-995e-106a483d8724"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.860631 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.860665 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbknq\" (UniqueName: \"kubernetes.io/projected/66961e66-3235-4bc8-995e-106a483d8724-kube-api-access-wbknq\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.860680 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.860691 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:32 crc kubenswrapper[4742]: I1205 06:11:32.860700 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/66961e66-3235-4bc8-995e-106a483d8724-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:33 crc kubenswrapper[4742]: I1205 06:11:33.524734 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" Dec 05 06:11:33 crc kubenswrapper[4742]: I1205 06:11:33.524742 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-6z8h9" event={"ID":"66961e66-3235-4bc8-995e-106a483d8724","Type":"ContainerDied","Data":"05b3e3e640582648744ee35dc205a45997888864001f778e28ab7965fac1c631"} Dec 05 06:11:33 crc kubenswrapper[4742]: I1205 06:11:33.524934 4742 scope.go:117] "RemoveContainer" containerID="97fbad03797344f8e1cbf777f0992d8832647f1c01b87eb27839951ba561dd71" Dec 05 06:11:33 crc kubenswrapper[4742]: I1205 06:11:33.562158 4742 scope.go:117] "RemoveContainer" containerID="25de760e9e0c88fbed4fd722bb636d50bce265dc8d3f1398594af46aeb0acfc7" Dec 05 06:11:33 crc kubenswrapper[4742]: I1205 06:11:33.585378 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6z8h9"] Dec 05 06:11:33 crc kubenswrapper[4742]: I1205 06:11:33.594220 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-6z8h9"] Dec 05 06:11:33 crc kubenswrapper[4742]: I1205 06:11:33.893759 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-pfkmv" Dec 05 06:11:33 crc kubenswrapper[4742]: I1205 06:11:33.980702 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e75b039-76da-40c8-a486-0f310cafa125-config-data\") pod \"7e75b039-76da-40c8-a486-0f310cafa125\" (UID: \"7e75b039-76da-40c8-a486-0f310cafa125\") " Dec 05 06:11:33 crc kubenswrapper[4742]: I1205 06:11:33.980777 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfjnd\" (UniqueName: \"kubernetes.io/projected/7e75b039-76da-40c8-a486-0f310cafa125-kube-api-access-xfjnd\") pod \"7e75b039-76da-40c8-a486-0f310cafa125\" (UID: \"7e75b039-76da-40c8-a486-0f310cafa125\") " Dec 05 06:11:33 crc kubenswrapper[4742]: I1205 06:11:33.980873 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e75b039-76da-40c8-a486-0f310cafa125-combined-ca-bundle\") pod \"7e75b039-76da-40c8-a486-0f310cafa125\" (UID: \"7e75b039-76da-40c8-a486-0f310cafa125\") " Dec 05 06:11:33 crc kubenswrapper[4742]: I1205 06:11:33.983917 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e75b039-76da-40c8-a486-0f310cafa125-kube-api-access-xfjnd" (OuterVolumeSpecName: "kube-api-access-xfjnd") pod "7e75b039-76da-40c8-a486-0f310cafa125" (UID: "7e75b039-76da-40c8-a486-0f310cafa125"). InnerVolumeSpecName "kube-api-access-xfjnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.007692 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e75b039-76da-40c8-a486-0f310cafa125-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e75b039-76da-40c8-a486-0f310cafa125" (UID: "7e75b039-76da-40c8-a486-0f310cafa125"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.039862 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e75b039-76da-40c8-a486-0f310cafa125-config-data" (OuterVolumeSpecName: "config-data") pod "7e75b039-76da-40c8-a486-0f310cafa125" (UID: "7e75b039-76da-40c8-a486-0f310cafa125"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.082079 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e75b039-76da-40c8-a486-0f310cafa125-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.082111 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e75b039-76da-40c8-a486-0f310cafa125-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.082121 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfjnd\" (UniqueName: \"kubernetes.io/projected/7e75b039-76da-40c8-a486-0f310cafa125-kube-api-access-xfjnd\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.402922 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66961e66-3235-4bc8-995e-106a483d8724" path="/var/lib/kubelet/pods/66961e66-3235-4bc8-995e-106a483d8724/volumes" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.534198 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-pfkmv" event={"ID":"7e75b039-76da-40c8-a486-0f310cafa125","Type":"ContainerDied","Data":"eaa5865160ca6929389024d6f1a2c7547c3f6fe9c914ecea31d711b8cb0d3ab4"} Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.534235 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eaa5865160ca6929389024d6f1a2c7547c3f6fe9c914ecea31d711b8cb0d3ab4" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.534288 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-pfkmv" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.786221 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-6dw5l"] Dec 05 06:11:34 crc kubenswrapper[4742]: E1205 06:11:34.786602 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0" containerName="mariadb-database-create" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.786620 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0" containerName="mariadb-database-create" Dec 05 06:11:34 crc kubenswrapper[4742]: E1205 06:11:34.786646 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66961e66-3235-4bc8-995e-106a483d8724" containerName="dnsmasq-dns" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.786654 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="66961e66-3235-4bc8-995e-106a483d8724" containerName="dnsmasq-dns" Dec 05 06:11:34 crc kubenswrapper[4742]: E1205 06:11:34.786669 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e3a818a-cec4-45bb-9da7-5f26059045a8" containerName="mariadb-database-create" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.786676 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e3a818a-cec4-45bb-9da7-5f26059045a8" containerName="mariadb-database-create" Dec 05 06:11:34 crc kubenswrapper[4742]: E1205 06:11:34.786687 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ca5c510-f473-4db9-8cb0-de955202c98b" containerName="init" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.786693 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ca5c510-f473-4db9-8cb0-de955202c98b" containerName="init" Dec 05 06:11:34 crc kubenswrapper[4742]: E1205 06:11:34.786704 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66961e66-3235-4bc8-995e-106a483d8724" containerName="init" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.786711 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="66961e66-3235-4bc8-995e-106a483d8724" containerName="init" Dec 05 06:11:34 crc kubenswrapper[4742]: E1205 06:11:34.786720 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ca5c510-f473-4db9-8cb0-de955202c98b" containerName="dnsmasq-dns" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.786729 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ca5c510-f473-4db9-8cb0-de955202c98b" containerName="dnsmasq-dns" Dec 05 06:11:34 crc kubenswrapper[4742]: E1205 06:11:34.786745 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49b98e00-ff84-46a9-b808-262b1246348c" containerName="mariadb-account-create-update" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.786752 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="49b98e00-ff84-46a9-b808-262b1246348c" containerName="mariadb-account-create-update" Dec 05 06:11:34 crc kubenswrapper[4742]: E1205 06:11:34.786768 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad" containerName="mariadb-account-create-update" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.786775 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad" containerName="mariadb-account-create-update" Dec 05 06:11:34 crc kubenswrapper[4742]: E1205 06:11:34.786787 4742 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="93dd367f-1105-4615-9562-2d9ad648e7a9" containerName="mariadb-account-create-update" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.786796 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="93dd367f-1105-4615-9562-2d9ad648e7a9" containerName="mariadb-account-create-update" Dec 05 06:11:34 crc kubenswrapper[4742]: E1205 06:11:34.786808 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e75b039-76da-40c8-a486-0f310cafa125" containerName="keystone-db-sync" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.786816 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e75b039-76da-40c8-a486-0f310cafa125" containerName="keystone-db-sync" Dec 05 06:11:34 crc kubenswrapper[4742]: E1205 06:11:34.786833 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="606e80d6-e92c-4f5d-9806-33e538679939" containerName="mariadb-database-create" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.786840 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="606e80d6-e92c-4f5d-9806-33e538679939" containerName="mariadb-database-create" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.787017 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="606e80d6-e92c-4f5d-9806-33e538679939" containerName="mariadb-database-create" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.787042 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="93dd367f-1105-4615-9562-2d9ad648e7a9" containerName="mariadb-account-create-update" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.787079 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e75b039-76da-40c8-a486-0f310cafa125" containerName="keystone-db-sync" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.787094 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ca5c510-f473-4db9-8cb0-de955202c98b" containerName="dnsmasq-dns" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.787106 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="49b98e00-ff84-46a9-b808-262b1246348c" containerName="mariadb-account-create-update" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.787120 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e3a818a-cec4-45bb-9da7-5f26059045a8" containerName="mariadb-database-create" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.787131 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad" containerName="mariadb-account-create-update" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.787140 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0" containerName="mariadb-database-create" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.787156 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="66961e66-3235-4bc8-995e-106a483d8724" containerName="dnsmasq-dns" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.787844 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.791312 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.791502 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.791678 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.791826 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.791989 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-dbvnt" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.793210 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-jttq8"] Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.794485 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.806652 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-6dw5l"] Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.818412 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-jttq8"] Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.898403 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-config\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.898449 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.898487 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-scripts\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.898514 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-combined-ca-bundle\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.898534 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzgnm\" (UniqueName: \"kubernetes.io/projected/6bec9017-c170-4e34-8f40-acd703b59710-kube-api-access-pzgnm\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 
06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.898574 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-credential-keys\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.898609 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.898626 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-fernet-keys\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.898657 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jrjr\" (UniqueName: \"kubernetes.io/projected/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-kube-api-access-7jrjr\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.898672 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-config-data\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.898701 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.898720 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.982656 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-c87xk"] Dec 05 06:11:34 crc kubenswrapper[4742]: I1205 06:11:34.983717 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-c87xk" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.000795 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-km54d" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.000903 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-4svn8"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.001081 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.001253 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.009668 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.009719 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.009744 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.009749 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-config\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.011819 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.011853 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-config\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.011877 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-scripts\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.011918 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-combined-ca-bundle\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:35 crc 
kubenswrapper[4742]: I1205 06:11:35.011958 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzgnm\" (UniqueName: \"kubernetes.io/projected/6bec9017-c170-4e34-8f40-acd703b59710-kube-api-access-pzgnm\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.012087 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-credential-keys\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.012191 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.012256 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-fernet-keys\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.012342 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jrjr\" (UniqueName: \"kubernetes.io/projected/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-kube-api-access-7jrjr\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.012374 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-config-data\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.012434 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.012599 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.012640 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.013612 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.025299 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.025507 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-5rhld" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.045732 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-scripts\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.046683 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-config-data\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.046685 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.047450 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-fernet-keys\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.053034 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jrjr\" (UniqueName: \"kubernetes.io/projected/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-kube-api-access-7jrjr\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.100758 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-credential-keys\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.104600 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-combined-ca-bundle\") pod \"keystone-bootstrap-6dw5l\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.115275 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9d56b0e1-cf28-4913-af18-4c13aafc539a-config\") pod \"neutron-db-sync-c87xk\" (UID: \"9d56b0e1-cf28-4913-af18-4c13aafc539a\") " pod="openstack/neutron-db-sync-c87xk" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.115329 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxmzv\" (UniqueName: 
\"kubernetes.io/projected/9d56b0e1-cf28-4913-af18-4c13aafc539a-kube-api-access-cxmzv\") pod \"neutron-db-sync-c87xk\" (UID: \"9d56b0e1-cf28-4913-af18-4c13aafc539a\") " pod="openstack/neutron-db-sync-c87xk" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.115377 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d56b0e1-cf28-4913-af18-4c13aafc539a-combined-ca-bundle\") pod \"neutron-db-sync-c87xk\" (UID: \"9d56b0e1-cf28-4913-af18-4c13aafc539a\") " pod="openstack/neutron-db-sync-c87xk" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.115421 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5vnp\" (UniqueName: \"kubernetes.io/projected/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-kube-api-access-l5vnp\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.115477 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-etc-machine-id\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.115521 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-combined-ca-bundle\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.115544 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-scripts\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.115569 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-config-data\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.115585 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-db-sync-config-data\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.117813 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzgnm\" (UniqueName: \"kubernetes.io/projected/6bec9017-c170-4e34-8f40-acd703b59710-kube-api-access-pzgnm\") pod \"dnsmasq-dns-bbf5cc879-jttq8\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.119040 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-4svn8"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 
06:11:35.124968 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.153459 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.166872 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-c87xk"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.211141 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.213327 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.216398 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-scripts\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.216435 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-config-data\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.216453 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-db-sync-config-data\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.216481 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9d56b0e1-cf28-4913-af18-4c13aafc539a-config\") pod \"neutron-db-sync-c87xk\" (UID: \"9d56b0e1-cf28-4913-af18-4c13aafc539a\") " pod="openstack/neutron-db-sync-c87xk" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.216497 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxmzv\" (UniqueName: \"kubernetes.io/projected/9d56b0e1-cf28-4913-af18-4c13aafc539a-kube-api-access-cxmzv\") pod \"neutron-db-sync-c87xk\" (UID: \"9d56b0e1-cf28-4913-af18-4c13aafc539a\") " pod="openstack/neutron-db-sync-c87xk" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.216526 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.216532 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d56b0e1-cf28-4913-af18-4c13aafc539a-combined-ca-bundle\") pod \"neutron-db-sync-c87xk\" (UID: \"9d56b0e1-cf28-4913-af18-4c13aafc539a\") " pod="openstack/neutron-db-sync-c87xk" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.217334 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5vnp\" (UniqueName: \"kubernetes.io/projected/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-kube-api-access-l5vnp\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " 
pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.217469 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-etc-machine-id\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.217551 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-combined-ca-bundle\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.218271 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.224349 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-scripts\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.228270 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-etc-machine-id\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.228322 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.228893 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-db-sync-config-data\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.243443 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d56b0e1-cf28-4913-af18-4c13aafc539a-combined-ca-bundle\") pod \"neutron-db-sync-c87xk\" (UID: \"9d56b0e1-cf28-4913-af18-4c13aafc539a\") " pod="openstack/neutron-db-sync-c87xk" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.245483 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-jttq8"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.249615 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/9d56b0e1-cf28-4913-af18-4c13aafc539a-config\") pod \"neutron-db-sync-c87xk\" (UID: \"9d56b0e1-cf28-4913-af18-4c13aafc539a\") " pod="openstack/neutron-db-sync-c87xk" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.249744 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-combined-ca-bundle\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.251389 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cxmzv\" (UniqueName: \"kubernetes.io/projected/9d56b0e1-cf28-4913-af18-4c13aafc539a-kube-api-access-cxmzv\") pod \"neutron-db-sync-c87xk\" (UID: \"9d56b0e1-cf28-4913-af18-4c13aafc539a\") " pod="openstack/neutron-db-sync-c87xk" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.252441 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-j7dqz"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.253643 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.257796 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-config-data\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.258650 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.258811 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-992rv" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.258912 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.260241 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5vnp\" (UniqueName: \"kubernetes.io/projected/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-kube-api-access-l5vnp\") pod \"cinder-db-sync-4svn8\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.268888 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-j7dqz"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.291079 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-22vpx"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.292180 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-22vpx" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.294743 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-7rss2" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.294997 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.305839 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-6xnkh"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.310580 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-c87xk" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.311233 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.320867 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.320910 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4df39ed0-9850-4409-8648-724b15671640-logs\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.320931 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-config-data\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.320946 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/388e7953-5819-4978-842c-1cf54fd568c9-log-httpd\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.320961 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-scripts\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.320986 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/388e7953-5819-4978-842c-1cf54fd568c9-run-httpd\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.321003 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-scripts\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.321047 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-config-data\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.321089 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.321118 4742 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-combined-ca-bundle\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.321171 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xh44w\" (UniqueName: \"kubernetes.io/projected/4df39ed0-9850-4409-8648-724b15671640-kube-api-access-xh44w\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.321194 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmp8w\" (UniqueName: \"kubernetes.io/projected/388e7953-5819-4978-842c-1cf54fd568c9-kube-api-access-lmp8w\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.330648 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-22vpx"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.331364 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-6xnkh"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425175 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425267 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425318 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4df39ed0-9850-4409-8648-724b15671640-logs\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425345 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-config-data\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425368 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqlb9\" (UniqueName: \"kubernetes.io/projected/d4b22256-5998-4055-acd4-6828f54186f8-kube-api-access-zqlb9\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425392 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/388e7953-5819-4978-842c-1cf54fd568c9-log-httpd\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425413 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e8bface-3ae8-4a16-85c0-eca434ca57f1-combined-ca-bundle\") pod \"barbican-db-sync-22vpx\" (UID: \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\") " pod="openstack/barbican-db-sync-22vpx" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425432 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-scripts\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425450 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2cdd\" (UniqueName: \"kubernetes.io/projected/1e8bface-3ae8-4a16-85c0-eca434ca57f1-kube-api-access-q2cdd\") pod \"barbican-db-sync-22vpx\" (UID: \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\") " pod="openstack/barbican-db-sync-22vpx" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425489 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/388e7953-5819-4978-842c-1cf54fd568c9-run-httpd\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425508 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425526 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-scripts\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425557 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1e8bface-3ae8-4a16-85c0-eca434ca57f1-db-sync-config-data\") pod \"barbican-db-sync-22vpx\" (UID: \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\") " pod="openstack/barbican-db-sync-22vpx" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425575 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-config-data\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425592 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " 
pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425613 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-combined-ca-bundle\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425638 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xh44w\" (UniqueName: \"kubernetes.io/projected/4df39ed0-9850-4409-8648-724b15671640-kube-api-access-xh44w\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425657 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-config\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425677 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425694 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmp8w\" (UniqueName: \"kubernetes.io/projected/388e7953-5819-4978-842c-1cf54fd568c9-kube-api-access-lmp8w\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.425721 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.427047 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4df39ed0-9850-4409-8648-724b15671640-logs\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.429024 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/388e7953-5819-4978-842c-1cf54fd568c9-run-httpd\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.429186 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/388e7953-5819-4978-842c-1cf54fd568c9-log-httpd\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.429168 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-scripts\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.429647 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-scripts\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.433663 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.433742 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-config-data\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.434506 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-config-data\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.434830 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.446894 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xh44w\" (UniqueName: \"kubernetes.io/projected/4df39ed0-9850-4409-8648-724b15671640-kube-api-access-xh44w\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.452309 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmp8w\" (UniqueName: \"kubernetes.io/projected/388e7953-5819-4978-842c-1cf54fd568c9-kube-api-access-lmp8w\") pod \"ceilometer-0\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.452824 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-combined-ca-bundle\") pod \"placement-db-sync-j7dqz\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.492664 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-4svn8" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.528025 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.528100 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqlb9\" (UniqueName: \"kubernetes.io/projected/d4b22256-5998-4055-acd4-6828f54186f8-kube-api-access-zqlb9\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.528121 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e8bface-3ae8-4a16-85c0-eca434ca57f1-combined-ca-bundle\") pod \"barbican-db-sync-22vpx\" (UID: \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\") " pod="openstack/barbican-db-sync-22vpx" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.528146 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2cdd\" (UniqueName: \"kubernetes.io/projected/1e8bface-3ae8-4a16-85c0-eca434ca57f1-kube-api-access-q2cdd\") pod \"barbican-db-sync-22vpx\" (UID: \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\") " pod="openstack/barbican-db-sync-22vpx" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.528177 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.528222 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1e8bface-3ae8-4a16-85c0-eca434ca57f1-db-sync-config-data\") pod \"barbican-db-sync-22vpx\" (UID: \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\") " pod="openstack/barbican-db-sync-22vpx" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.528282 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-config\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.528306 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.528338 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 
06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.529530 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.530167 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.530544 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-config\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.531380 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.531527 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.534084 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1e8bface-3ae8-4a16-85c0-eca434ca57f1-db-sync-config-data\") pod \"barbican-db-sync-22vpx\" (UID: \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\") " pod="openstack/barbican-db-sync-22vpx" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.534314 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e8bface-3ae8-4a16-85c0-eca434ca57f1-combined-ca-bundle\") pod \"barbican-db-sync-22vpx\" (UID: \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\") " pod="openstack/barbican-db-sync-22vpx" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.546312 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2cdd\" (UniqueName: \"kubernetes.io/projected/1e8bface-3ae8-4a16-85c0-eca434ca57f1-kube-api-access-q2cdd\") pod \"barbican-db-sync-22vpx\" (UID: \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\") " pod="openstack/barbican-db-sync-22vpx" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.550120 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqlb9\" (UniqueName: \"kubernetes.io/projected/d4b22256-5998-4055-acd4-6828f54186f8-kube-api-access-zqlb9\") pod \"dnsmasq-dns-56df8fb6b7-6xnkh\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.554572 4742 util.go:30] "No sandbox for pod can be found. 
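Aside: the reconciler_common.go burst above is the kubelet volume manager walking each new pod's volumes through VerifyControllerAttachedVolume (line 245), MountVolume (line 218) and MountVolume.SetUp (operation_generator.go:637). A minimal sketch of the kind of pod spec that produces these entries, using the k8s.io/api types; only the pod, namespace and volume names copied from the log are real, and the secret name, image and mount path are assumptions:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	pod := &corev1.Pod{
		ObjectMeta: metav1.ObjectMeta{Name: "barbican-db-sync-22vpx", Namespace: "openstack"},
		Spec: corev1.PodSpec{
			Volumes: []corev1.Volume{
				{
					// Logged under the kubernetes.io/secret plugin; the name of
					// the backing Secret is an assumption, not in the log.
					Name: "combined-ca-bundle",
					VolumeSource: corev1.VolumeSource{
						Secret: &corev1.SecretVolumeSource{SecretName: "combined-ca-bundle"},
					},
				},
				// "kube-api-access-q2cdd" is deliberately not declared here:
				// the kubelet injects it as a projected volume (service-account
				// token, kube-root-ca.crt, namespace), which is why the log
				// shows it under the kubernetes.io/projected plugin.
			},
			Containers: []corev1.Container{{
				Name:  "barbican-db-sync",                  // hypothetical
				Image: "example.invalid/barbican:latest",   // hypothetical
				VolumeMounts: []corev1.VolumeMount{
					// Mount path assumed for illustration.
					{Name: "combined-ca-bundle", MountPath: "/etc/pki/tls/certs", ReadOnly: true},
				},
			}},
		},
	}
	fmt.Println(pod.Namespace + "/" + pod.Name)
}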
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.593463 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-j7dqz" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.611536 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-22vpx" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.635285 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.669480 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-jttq8"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.767082 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-6dw5l"] Dec 05 06:11:35 crc kubenswrapper[4742]: W1205 06:11:35.788124 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d83f27e_57b2_4ef9_bafc_0e2f023041d0.slice/crio-542d333f2d9575151e7c2b3d97ad049d1bb8a457193668c993a592b6b0a232ec WatchSource:0}: Error finding container 542d333f2d9575151e7c2b3d97ad049d1bb8a457193668c993a592b6b0a232ec: Status 404 returned error can't find the container with id 542d333f2d9575151e7c2b3d97ad049d1bb8a457193668c993a592b6b0a232ec Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.890479 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-c87xk"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.926488 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.928477 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.937465 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.942861 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.942930 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.943125 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 06:11:35 crc kubenswrapper[4742]: I1205 06:11:35.943227 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-jk4gs" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.010215 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.012560 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.016143 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.016201 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.031144 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.039865 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-4svn8"] Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.047557 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.047667 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13413141-d97d-4fcc-a854-717ad77a22e5-logs\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.047712 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb8g7\" (UniqueName: \"kubernetes.io/projected/13413141-d97d-4fcc-a854-717ad77a22e5-kube-api-access-cb8g7\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.047741 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.047757 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.047854 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.047883 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " 
pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.047910 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/13413141-d97d-4fcc-a854-717ad77a22e5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159258 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-scripts\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159388 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159425 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13413141-d97d-4fcc-a854-717ad77a22e5-logs\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159475 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb8g7\" (UniqueName: \"kubernetes.io/projected/13413141-d97d-4fcc-a854-717ad77a22e5-kube-api-access-cb8g7\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159494 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159510 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159562 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159628 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " 
pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159646 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159667 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd351f0d-461e-4d10-9804-36f61fb0ed1d-logs\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159713 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc6n4\" (UniqueName: \"kubernetes.io/projected/bd351f0d-461e-4d10-9804-36f61fb0ed1d-kube-api-access-lc6n4\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159733 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159754 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-config-data\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159798 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159817 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bd351f0d-461e-4d10-9804-36f61fb0ed1d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.159833 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/13413141-d97d-4fcc-a854-717ad77a22e5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.161132 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/13413141-d97d-4fcc-a854-717ad77a22e5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " 
pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.161369 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13413141-d97d-4fcc-a854-717ad77a22e5-logs\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.162505 4742 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.172244 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.173557 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.201428 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.205800 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.245209 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb8g7\" (UniqueName: \"kubernetes.io/projected/13413141-d97d-4fcc-a854-717ad77a22e5-kube-api-access-cb8g7\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.262613 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.262709 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.262749 4742 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.262795 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd351f0d-461e-4d10-9804-36f61fb0ed1d-logs\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.262834 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lc6n4\" (UniqueName: \"kubernetes.io/projected/bd351f0d-461e-4d10-9804-36f61fb0ed1d-kube-api-access-lc6n4\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.262870 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-config-data\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.262931 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bd351f0d-461e-4d10-9804-36f61fb0ed1d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.263070 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-scripts\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.264309 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd351f0d-461e-4d10-9804-36f61fb0ed1d-logs\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.265114 4742 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.267050 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-scripts\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.267918 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.268196 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bd351f0d-461e-4d10-9804-36f61fb0ed1d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.270290 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.274107 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-config-data\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.276162 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.289619 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-j7dqz"] Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.298917 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.299804 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc6n4\" (UniqueName: \"kubernetes.io/projected/bd351f0d-461e-4d10-9804-36f61fb0ed1d-kube-api-access-lc6n4\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.307491 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.395826 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.556618 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.563287 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6dw5l" event={"ID":"3d83f27e-57b2-4ef9-bafc-0e2f023041d0","Type":"ContainerStarted","Data":"e7e04a131329ec48b97ade2755e762fab3c6aca07ebac33bdded7c2512e31b7c"} Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.563520 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6dw5l" event={"ID":"3d83f27e-57b2-4ef9-bafc-0e2f023041d0","Type":"ContainerStarted","Data":"542d333f2d9575151e7c2b3d97ad049d1bb8a457193668c993a592b6b0a232ec"} Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.568047 4742 generic.go:334] "Generic (PLEG): container finished" podID="6bec9017-c170-4e34-8f40-acd703b59710" containerID="bb610fc125b2f206c6e2f8776d3f34f3a75e803bfcba92a4db60741fd0fb1832" exitCode=0 Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.568150 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" event={"ID":"6bec9017-c170-4e34-8f40-acd703b59710","Type":"ContainerDied","Data":"bb610fc125b2f206c6e2f8776d3f34f3a75e803bfcba92a4db60741fd0fb1832"} Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.568173 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" event={"ID":"6bec9017-c170-4e34-8f40-acd703b59710","Type":"ContainerStarted","Data":"1878cf2dce28048409bb86fc3ae16a1f75dbf158839efdd58d15b2dd58c67658"} Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.578222 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-j7dqz" event={"ID":"4df39ed0-9850-4409-8648-724b15671640","Type":"ContainerStarted","Data":"c43440cb639fe912dee5837305ecde2a280633e7e48632e31f80581235c548ae"} Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.588230 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-6dw5l" podStartSLOduration=2.5882166030000002 podStartE2EDuration="2.588216603s" podCreationTimestamp="2025-12-05 06:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:11:36.583791265 +0000 UTC m=+1172.495926327" watchObservedRunningTime="2025-12-05 06:11:36.588216603 +0000 UTC m=+1172.500351675" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.592786 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"388e7953-5819-4978-842c-1cf54fd568c9","Type":"ContainerStarted","Data":"3ab014d5fc00da8e154c84721f06351ebc4410d32d4a0077bf705637af70942b"} Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.594466 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-4svn8" event={"ID":"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1","Type":"ContainerStarted","Data":"e011265f600e54f0669f1087eb17e1e382aede2fbf661e342e9186c867e2fa87"} Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.595641 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-c87xk" event={"ID":"9d56b0e1-cf28-4913-af18-4c13aafc539a","Type":"ContainerStarted","Data":"4c14252e4fd49cea57c985322e4ad4f38df07e8c27f7e24e38b80a1fb1bca49f"} Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.595682 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-c87xk" 
event={"ID":"9d56b0e1-cf28-4913-af18-4c13aafc539a","Type":"ContainerStarted","Data":"71fd1d5ff137d414736b1d17e8ab2f4a36b8f2992eae0df07ed01b81e60d6f5d"} Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.637171 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-c87xk" podStartSLOduration=2.637155276 podStartE2EDuration="2.637155276s" podCreationTimestamp="2025-12-05 06:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:11:36.63618644 +0000 UTC m=+1172.548321512" watchObservedRunningTime="2025-12-05 06:11:36.637155276 +0000 UTC m=+1172.549290328" Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.763143 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:11:36 crc kubenswrapper[4742]: I1205 06:11:36.855562 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.118290 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.197228 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.225113 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-22vpx"] Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.235780 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-6xnkh"] Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.285959 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-config\") pod \"6bec9017-c170-4e34-8f40-acd703b59710\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.285996 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-ovsdbserver-sb\") pod \"6bec9017-c170-4e34-8f40-acd703b59710\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.286013 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-dns-svc\") pod \"6bec9017-c170-4e34-8f40-acd703b59710\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.286124 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-dns-swift-storage-0\") pod \"6bec9017-c170-4e34-8f40-acd703b59710\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.286181 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-ovsdbserver-nb\") pod \"6bec9017-c170-4e34-8f40-acd703b59710\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.286220 4742 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-pzgnm\" (UniqueName: \"kubernetes.io/projected/6bec9017-c170-4e34-8f40-acd703b59710-kube-api-access-pzgnm\") pod \"6bec9017-c170-4e34-8f40-acd703b59710\" (UID: \"6bec9017-c170-4e34-8f40-acd703b59710\") " Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.314096 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bec9017-c170-4e34-8f40-acd703b59710-kube-api-access-pzgnm" (OuterVolumeSpecName: "kube-api-access-pzgnm") pod "6bec9017-c170-4e34-8f40-acd703b59710" (UID: "6bec9017-c170-4e34-8f40-acd703b59710"). InnerVolumeSpecName "kube-api-access-pzgnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.315444 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6bec9017-c170-4e34-8f40-acd703b59710" (UID: "6bec9017-c170-4e34-8f40-acd703b59710"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.321217 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6bec9017-c170-4e34-8f40-acd703b59710" (UID: "6bec9017-c170-4e34-8f40-acd703b59710"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.350670 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6bec9017-c170-4e34-8f40-acd703b59710" (UID: "6bec9017-c170-4e34-8f40-acd703b59710"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.358540 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6bec9017-c170-4e34-8f40-acd703b59710" (UID: "6bec9017-c170-4e34-8f40-acd703b59710"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.362038 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-config" (OuterVolumeSpecName: "config") pod "6bec9017-c170-4e34-8f40-acd703b59710" (UID: "6bec9017-c170-4e34-8f40-acd703b59710"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.388018 4742 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.388360 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.388373 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzgnm\" (UniqueName: \"kubernetes.io/projected/6bec9017-c170-4e34-8f40-acd703b59710-kube-api-access-pzgnm\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.388382 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.388392 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.388400 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6bec9017-c170-4e34-8f40-acd703b59710-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.469601 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.618446 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"13413141-d97d-4fcc-a854-717ad77a22e5","Type":"ContainerStarted","Data":"ce3050db4f932ac3564e42de6a7853ea6dcc09a81e0c540803b824a0818ef058"} Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.620432 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-22vpx" event={"ID":"1e8bface-3ae8-4a16-85c0-eca434ca57f1","Type":"ContainerStarted","Data":"8b0c8c1bcc964a26b542b7911e1d3db6cc9db4ad3d64c63024775c7227922062"} Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.623078 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" event={"ID":"6bec9017-c170-4e34-8f40-acd703b59710","Type":"ContainerDied","Data":"1878cf2dce28048409bb86fc3ae16a1f75dbf158839efdd58d15b2dd58c67658"} Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.623109 4742 scope.go:117] "RemoveContainer" containerID="bb610fc125b2f206c6e2f8776d3f34f3a75e803bfcba92a4db60741fd0fb1832" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.623239 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-jttq8" Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.629595 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" event={"ID":"d4b22256-5998-4055-acd4-6828f54186f8","Type":"ContainerStarted","Data":"d8dafcc2ab495847b39f6f4c197785bb978ee409a35eb660f86d0218720417a4"} Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.759299 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-jttq8"] Dec 05 06:11:37 crc kubenswrapper[4742]: I1205 06:11:37.768671 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-jttq8"] Dec 05 06:11:38 crc kubenswrapper[4742]: I1205 06:11:38.064600 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:11:38 crc kubenswrapper[4742]: W1205 06:11:38.077005 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbd351f0d_461e_4d10_9804_36f61fb0ed1d.slice/crio-81f40442d3559e270714681d060bd47dc1d4d02ddcd0c1b8e728293bf7ddb402 WatchSource:0}: Error finding container 81f40442d3559e270714681d060bd47dc1d4d02ddcd0c1b8e728293bf7ddb402: Status 404 returned error can't find the container with id 81f40442d3559e270714681d060bd47dc1d4d02ddcd0c1b8e728293bf7ddb402 Dec 05 06:11:38 crc kubenswrapper[4742]: I1205 06:11:38.414584 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bec9017-c170-4e34-8f40-acd703b59710" path="/var/lib/kubelet/pods/6bec9017-c170-4e34-8f40-acd703b59710/volumes" Dec 05 06:11:38 crc kubenswrapper[4742]: I1205 06:11:38.659421 4742 generic.go:334] "Generic (PLEG): container finished" podID="d4b22256-5998-4055-acd4-6828f54186f8" containerID="92030834cb232289e8d37dfec9a1c8db6df6dc07deb8302e24edfdfeb74517fa" exitCode=0 Dec 05 06:11:38 crc kubenswrapper[4742]: I1205 06:11:38.659774 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" event={"ID":"d4b22256-5998-4055-acd4-6828f54186f8","Type":"ContainerDied","Data":"92030834cb232289e8d37dfec9a1c8db6df6dc07deb8302e24edfdfeb74517fa"} Dec 05 06:11:38 crc kubenswrapper[4742]: I1205 06:11:38.663697 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"13413141-d97d-4fcc-a854-717ad77a22e5","Type":"ContainerStarted","Data":"1aa5e3de4ab273044e02034d67d28d99928791894b82f244c4fee30405679838"} Dec 05 06:11:38 crc kubenswrapper[4742]: I1205 06:11:38.670410 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bd351f0d-461e-4d10-9804-36f61fb0ed1d","Type":"ContainerStarted","Data":"81f40442d3559e270714681d060bd47dc1d4d02ddcd0c1b8e728293bf7ddb402"} Dec 05 06:11:39 crc kubenswrapper[4742]: I1205 06:11:39.684034 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bd351f0d-461e-4d10-9804-36f61fb0ed1d","Type":"ContainerStarted","Data":"82be5802cfde6ab8b8b021b37ae8197e7c5b5e2e4458917e6cf9e96c6446e0f0"} Dec 05 06:11:39 crc kubenswrapper[4742]: I1205 06:11:39.688820 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" event={"ID":"d4b22256-5998-4055-acd4-6828f54186f8","Type":"ContainerStarted","Data":"9eff3ddcc392c899c5f8d811e5f85f3a34be1babc52bc43ed880f88ea93b5636"} Dec 05 06:11:39 crc kubenswrapper[4742]: I1205 
06:11:39.688969 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:39 crc kubenswrapper[4742]: I1205 06:11:39.692246 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"13413141-d97d-4fcc-a854-717ad77a22e5","Type":"ContainerStarted","Data":"beef9bc9b842050cb484ac25c7ce84a788c5de4601c38c5aad3e68be7bb82d52"} Dec 05 06:11:39 crc kubenswrapper[4742]: I1205 06:11:39.692365 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="13413141-d97d-4fcc-a854-717ad77a22e5" containerName="glance-log" containerID="cri-o://1aa5e3de4ab273044e02034d67d28d99928791894b82f244c4fee30405679838" gracePeriod=30 Dec 05 06:11:39 crc kubenswrapper[4742]: I1205 06:11:39.692423 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="13413141-d97d-4fcc-a854-717ad77a22e5" containerName="glance-httpd" containerID="cri-o://beef9bc9b842050cb484ac25c7ce84a788c5de4601c38c5aad3e68be7bb82d52" gracePeriod=30 Dec 05 06:11:39 crc kubenswrapper[4742]: I1205 06:11:39.714127 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" podStartSLOduration=4.714109236 podStartE2EDuration="4.714109236s" podCreationTimestamp="2025-12-05 06:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:11:39.708470076 +0000 UTC m=+1175.620605158" watchObservedRunningTime="2025-12-05 06:11:39.714109236 +0000 UTC m=+1175.626244298" Dec 05 06:11:39 crc kubenswrapper[4742]: I1205 06:11:39.733740 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.733720559 podStartE2EDuration="5.733720559s" podCreationTimestamp="2025-12-05 06:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:11:39.726137647 +0000 UTC m=+1175.638272709" watchObservedRunningTime="2025-12-05 06:11:39.733720559 +0000 UTC m=+1175.645855621" Dec 05 06:11:40 crc kubenswrapper[4742]: I1205 06:11:40.703862 4742 generic.go:334] "Generic (PLEG): container finished" podID="13413141-d97d-4fcc-a854-717ad77a22e5" containerID="beef9bc9b842050cb484ac25c7ce84a788c5de4601c38c5aad3e68be7bb82d52" exitCode=0 Dec 05 06:11:40 crc kubenswrapper[4742]: I1205 06:11:40.704156 4742 generic.go:334] "Generic (PLEG): container finished" podID="13413141-d97d-4fcc-a854-717ad77a22e5" containerID="1aa5e3de4ab273044e02034d67d28d99928791894b82f244c4fee30405679838" exitCode=143 Dec 05 06:11:40 crc kubenswrapper[4742]: I1205 06:11:40.704785 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"13413141-d97d-4fcc-a854-717ad77a22e5","Type":"ContainerDied","Data":"beef9bc9b842050cb484ac25c7ce84a788c5de4601c38c5aad3e68be7bb82d52"} Dec 05 06:11:40 crc kubenswrapper[4742]: I1205 06:11:40.704843 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"13413141-d97d-4fcc-a854-717ad77a22e5","Type":"ContainerDied","Data":"1aa5e3de4ab273044e02034d67d28d99928791894b82f244c4fee30405679838"} Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.536230 4742 util.go:48] "No ready sandbox for pod 
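Aside: the exit codes above follow the usual 128+signal convention reported by container runtimes. The glance-httpd container exited 0 (clean shutdown within the 30s grace period), while glance-log exited 143 = 128 + 15, i.e. it was terminated by the SIGTERM that "Killing container with a grace period" sent; a SIGKILL after the grace period would instead show 137 = 128 + 9. A trivial decoder:

package main

import "fmt"

func main() {
	// 0 and 143 are taken from the entries above; 137 is included for contrast.
	for _, code := range []int{0, 143, 137} {
		if code > 128 {
			fmt.Printf("exitCode=%d -> terminated by signal %d\n", code, code-128)
		} else {
			fmt.Printf("exitCode=%d -> clean exit\n", code)
		}
	}
}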
can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.718727 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-combined-ca-bundle\") pod \"13413141-d97d-4fcc-a854-717ad77a22e5\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.719302 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/13413141-d97d-4fcc-a854-717ad77a22e5-httpd-run\") pod \"13413141-d97d-4fcc-a854-717ad77a22e5\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.719350 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"13413141-d97d-4fcc-a854-717ad77a22e5\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.719400 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-scripts\") pod \"13413141-d97d-4fcc-a854-717ad77a22e5\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.719510 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13413141-d97d-4fcc-a854-717ad77a22e5-logs\") pod \"13413141-d97d-4fcc-a854-717ad77a22e5\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.719578 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cb8g7\" (UniqueName: \"kubernetes.io/projected/13413141-d97d-4fcc-a854-717ad77a22e5-kube-api-access-cb8g7\") pod \"13413141-d97d-4fcc-a854-717ad77a22e5\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.719602 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-internal-tls-certs\") pod \"13413141-d97d-4fcc-a854-717ad77a22e5\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.719666 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-config-data\") pod \"13413141-d97d-4fcc-a854-717ad77a22e5\" (UID: \"13413141-d97d-4fcc-a854-717ad77a22e5\") " Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.720826 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13413141-d97d-4fcc-a854-717ad77a22e5-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "13413141-d97d-4fcc-a854-717ad77a22e5" (UID: "13413141-d97d-4fcc-a854-717ad77a22e5"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.721458 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13413141-d97d-4fcc-a854-717ad77a22e5-logs" (OuterVolumeSpecName: "logs") pod "13413141-d97d-4fcc-a854-717ad77a22e5" (UID: "13413141-d97d-4fcc-a854-717ad77a22e5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.726173 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13413141-d97d-4fcc-a854-717ad77a22e5-kube-api-access-cb8g7" (OuterVolumeSpecName: "kube-api-access-cb8g7") pod "13413141-d97d-4fcc-a854-717ad77a22e5" (UID: "13413141-d97d-4fcc-a854-717ad77a22e5"). InnerVolumeSpecName "kube-api-access-cb8g7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.726368 4742 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/13413141-d97d-4fcc-a854-717ad77a22e5-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.726402 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13413141-d97d-4fcc-a854-717ad77a22e5-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.727237 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "13413141-d97d-4fcc-a854-717ad77a22e5" (UID: "13413141-d97d-4fcc-a854-717ad77a22e5"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.735282 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-scripts" (OuterVolumeSpecName: "scripts") pod "13413141-d97d-4fcc-a854-717ad77a22e5" (UID: "13413141-d97d-4fcc-a854-717ad77a22e5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.738322 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"13413141-d97d-4fcc-a854-717ad77a22e5","Type":"ContainerDied","Data":"ce3050db4f932ac3564e42de6a7853ea6dcc09a81e0c540803b824a0818ef058"} Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.738396 4742 scope.go:117] "RemoveContainer" containerID="beef9bc9b842050cb484ac25c7ce84a788c5de4601c38c5aad3e68be7bb82d52" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.738560 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.741002 4742 generic.go:334] "Generic (PLEG): container finished" podID="3d83f27e-57b2-4ef9-bafc-0e2f023041d0" containerID="e7e04a131329ec48b97ade2755e762fab3c6aca07ebac33bdded7c2512e31b7c" exitCode=0 Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.741037 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6dw5l" event={"ID":"3d83f27e-57b2-4ef9-bafc-0e2f023041d0","Type":"ContainerDied","Data":"e7e04a131329ec48b97ade2755e762fab3c6aca07ebac33bdded7c2512e31b7c"} Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.772541 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13413141-d97d-4fcc-a854-717ad77a22e5" (UID: "13413141-d97d-4fcc-a854-717ad77a22e5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.820430 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-config-data" (OuterVolumeSpecName: "config-data") pod "13413141-d97d-4fcc-a854-717ad77a22e5" (UID: "13413141-d97d-4fcc-a854-717ad77a22e5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.820531 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "13413141-d97d-4fcc-a854-717ad77a22e5" (UID: "13413141-d97d-4fcc-a854-717ad77a22e5"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.831047 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cb8g7\" (UniqueName: \"kubernetes.io/projected/13413141-d97d-4fcc-a854-717ad77a22e5-kube-api-access-cb8g7\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.831100 4742 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.831111 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.831125 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.831159 4742 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.831170 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/13413141-d97d-4fcc-a854-717ad77a22e5-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.848785 4742 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 05 06:11:41 crc kubenswrapper[4742]: I1205 06:11:41.932469 4742 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.078106 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.104525 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.114357 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:11:42 crc kubenswrapper[4742]: E1205 06:11:42.114671 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13413141-d97d-4fcc-a854-717ad77a22e5" containerName="glance-log" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.114682 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="13413141-d97d-4fcc-a854-717ad77a22e5" containerName="glance-log" Dec 05 06:11:42 crc kubenswrapper[4742]: E1205 06:11:42.114709 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13413141-d97d-4fcc-a854-717ad77a22e5" containerName="glance-httpd" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.114716 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="13413141-d97d-4fcc-a854-717ad77a22e5" containerName="glance-httpd" Dec 05 06:11:42 crc kubenswrapper[4742]: E1205 06:11:42.114728 4742 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6bec9017-c170-4e34-8f40-acd703b59710" containerName="init" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.114734 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bec9017-c170-4e34-8f40-acd703b59710" containerName="init" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.115050 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="13413141-d97d-4fcc-a854-717ad77a22e5" containerName="glance-httpd" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.115447 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bec9017-c170-4e34-8f40-acd703b59710" containerName="init" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.115480 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="13413141-d97d-4fcc-a854-717ad77a22e5" containerName="glance-log" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.116525 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.120630 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.120923 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.133084 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:11:42 crc kubenswrapper[4742]: E1205 06:11:42.193349 4742 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13413141_d97d_4fcc_a854_717ad77a22e5.slice\": RecentStats: unable to find data in memory cache]" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.238528 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-logs\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.238586 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.238622 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.238656 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 
06:11:42.238743 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.238770 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thkmw\" (UniqueName: \"kubernetes.io/projected/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-kube-api-access-thkmw\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.239088 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.239125 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.343112 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.343185 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thkmw\" (UniqueName: \"kubernetes.io/projected/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-kube-api-access-thkmw\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.343355 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.343667 4742 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.345162 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.345234 
4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-logs\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.345264 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.345318 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.345745 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-logs\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.345814 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.348180 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.354348 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.354615 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-config-data\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.354762 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.359648 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-scripts\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.362729 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thkmw\" (UniqueName: \"kubernetes.io/projected/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-kube-api-access-thkmw\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.386150 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.401415 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13413141-d97d-4fcc-a854-717ad77a22e5" path="/var/lib/kubelet/pods/13413141-d97d-4fcc-a854-717ad77a22e5/volumes" Dec 05 06:11:42 crc kubenswrapper[4742]: I1205 06:11:42.447284 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 06:11:45 crc kubenswrapper[4742]: I1205 06:11:45.637256 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:11:45 crc kubenswrapper[4742]: I1205 06:11:45.719739 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-mlw4h"] Dec 05 06:11:45 crc kubenswrapper[4742]: I1205 06:11:45.720097 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" podUID="9979f7f7-778c-47f3-8263-d3d93753e714" containerName="dnsmasq-dns" containerID="cri-o://36120690a82b6f681780c91a20a3cc8a0a413e14252b3db344b7e688541ec8f6" gracePeriod=10 Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.494470 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.632809 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-credential-keys\") pod \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.632920 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-config-data\") pod \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.632954 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-fernet-keys\") pod \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.633011 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-combined-ca-bundle\") pod \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.633118 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jrjr\" (UniqueName: \"kubernetes.io/projected/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-kube-api-access-7jrjr\") pod \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.633237 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-scripts\") pod \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\" (UID: \"3d83f27e-57b2-4ef9-bafc-0e2f023041d0\") " Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.657836 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3d83f27e-57b2-4ef9-bafc-0e2f023041d0" (UID: "3d83f27e-57b2-4ef9-bafc-0e2f023041d0"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.657869 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-kube-api-access-7jrjr" (OuterVolumeSpecName: "kube-api-access-7jrjr") pod "3d83f27e-57b2-4ef9-bafc-0e2f023041d0" (UID: "3d83f27e-57b2-4ef9-bafc-0e2f023041d0"). InnerVolumeSpecName "kube-api-access-7jrjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.657968 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3d83f27e-57b2-4ef9-bafc-0e2f023041d0" (UID: "3d83f27e-57b2-4ef9-bafc-0e2f023041d0"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.659073 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-scripts" (OuterVolumeSpecName: "scripts") pod "3d83f27e-57b2-4ef9-bafc-0e2f023041d0" (UID: "3d83f27e-57b2-4ef9-bafc-0e2f023041d0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.663836 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-config-data" (OuterVolumeSpecName: "config-data") pod "3d83f27e-57b2-4ef9-bafc-0e2f023041d0" (UID: "3d83f27e-57b2-4ef9-bafc-0e2f023041d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.670394 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d83f27e-57b2-4ef9-bafc-0e2f023041d0" (UID: "3d83f27e-57b2-4ef9-bafc-0e2f023041d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.670859 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.670915 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.670965 4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.671725 4742 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"310cca7f57f78facafa7379a55640dd8bda7651e6fa10b0fa067a67c3dc118ef"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.672047 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://310cca7f57f78facafa7379a55640dd8bda7651e6fa10b0fa067a67c3dc118ef" gracePeriod=600 Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.734792 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.734827 4742 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.734838 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.734848 4742 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.734856 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.734865 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jrjr\" (UniqueName: \"kubernetes.io/projected/3d83f27e-57b2-4ef9-bafc-0e2f023041d0-kube-api-access-7jrjr\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.798455 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-6dw5l" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.798467 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-6dw5l" event={"ID":"3d83f27e-57b2-4ef9-bafc-0e2f023041d0","Type":"ContainerDied","Data":"542d333f2d9575151e7c2b3d97ad049d1bb8a457193668c993a592b6b0a232ec"} Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.798539 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="542d333f2d9575151e7c2b3d97ad049d1bb8a457193668c993a592b6b0a232ec" Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.800831 4742 generic.go:334] "Generic (PLEG): container finished" podID="9979f7f7-778c-47f3-8263-d3d93753e714" containerID="36120690a82b6f681780c91a20a3cc8a0a413e14252b3db344b7e688541ec8f6" exitCode=0 Dec 05 06:11:46 crc kubenswrapper[4742]: I1205 06:11:46.800867 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" event={"ID":"9979f7f7-778c-47f3-8263-d3d93753e714","Type":"ContainerDied","Data":"36120690a82b6f681780c91a20a3cc8a0a413e14252b3db344b7e688541ec8f6"} Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.599118 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-6dw5l"] Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.607447 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-6dw5l"] Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.694811 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-r2qnr"] Dec 05 06:11:47 crc kubenswrapper[4742]: E1205 06:11:47.695181 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d83f27e-57b2-4ef9-bafc-0e2f023041d0" containerName="keystone-bootstrap" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.695196 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d83f27e-57b2-4ef9-bafc-0e2f023041d0" containerName="keystone-bootstrap" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.695359 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d83f27e-57b2-4ef9-bafc-0e2f023041d0" containerName="keystone-bootstrap" Dec 
05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.695913 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.698246 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.698407 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.698461 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.698506 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-dbvnt" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.698572 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.703351 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-r2qnr"] Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.760926 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-config-data\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.761034 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-credential-keys\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.761194 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gb9c7\" (UniqueName: \"kubernetes.io/projected/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-kube-api-access-gb9c7\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.761284 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-combined-ca-bundle\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.761338 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-fernet-keys\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.761383 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-scripts\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc 
kubenswrapper[4742]: I1205 06:11:47.812660 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="310cca7f57f78facafa7379a55640dd8bda7651e6fa10b0fa067a67c3dc118ef" exitCode=0 Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.812742 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"310cca7f57f78facafa7379a55640dd8bda7651e6fa10b0fa067a67c3dc118ef"} Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.863559 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-config-data\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.863635 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-credential-keys\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.863666 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gb9c7\" (UniqueName: \"kubernetes.io/projected/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-kube-api-access-gb9c7\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.863702 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-combined-ca-bundle\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.863729 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-fernet-keys\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.863744 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-scripts\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.868712 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-fernet-keys\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.869140 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-scripts\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " 
pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.869243 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-config-data\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.869810 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-combined-ca-bundle\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.870583 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-credential-keys\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:47 crc kubenswrapper[4742]: I1205 06:11:47.885805 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gb9c7\" (UniqueName: \"kubernetes.io/projected/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-kube-api-access-gb9c7\") pod \"keystone-bootstrap-r2qnr\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:48 crc kubenswrapper[4742]: I1205 06:11:48.014697 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:11:48 crc kubenswrapper[4742]: I1205 06:11:48.394347 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d83f27e-57b2-4ef9-bafc-0e2f023041d0" path="/var/lib/kubelet/pods/3d83f27e-57b2-4ef9-bafc-0e2f023041d0/volumes" Dec 05 06:11:52 crc kubenswrapper[4742]: I1205 06:11:52.096772 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" podUID="9979f7f7-778c-47f3-8263-d3d93753e714" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.128:5353: i/o timeout" Dec 05 06:11:55 crc kubenswrapper[4742]: I1205 06:11:55.893384 4742 generic.go:334] "Generic (PLEG): container finished" podID="9d56b0e1-cf28-4913-af18-4c13aafc539a" containerID="4c14252e4fd49cea57c985322e4ad4f38df07e8c27f7e24e38b80a1fb1bca49f" exitCode=0 Dec 05 06:11:55 crc kubenswrapper[4742]: I1205 06:11:55.893556 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-c87xk" event={"ID":"9d56b0e1-cf28-4913-af18-4c13aafc539a","Type":"ContainerDied","Data":"4c14252e4fd49cea57c985322e4ad4f38df07e8c27f7e24e38b80a1fb1bca49f"} Dec 05 06:11:56 crc kubenswrapper[4742]: E1205 06:11:56.870486 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 05 06:11:56 crc kubenswrapper[4742]: E1205 06:11:56.870945 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-q2cdd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-22vpx_openstack(1e8bface-3ae8-4a16-85c0-eca434ca57f1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 06:11:56 crc kubenswrapper[4742]: I1205 06:11:56.871573 4742 scope.go:117] "RemoveContainer" containerID="1aa5e3de4ab273044e02034d67d28d99928791894b82f244c4fee30405679838" Dec 05 06:11:56 crc kubenswrapper[4742]: E1205 06:11:56.872012 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-22vpx" podUID="1e8bface-3ae8-4a16-85c0-eca434ca57f1" Dec 05 06:11:56 crc kubenswrapper[4742]: E1205 06:11:56.907666 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-22vpx" podUID="1e8bface-3ae8-4a16-85c0-eca434ca57f1" Dec 05 06:11:57 crc kubenswrapper[4742]: I1205 06:11:57.101905 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" podUID="9979f7f7-778c-47f3-8263-d3d93753e714" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.128:5353: i/o timeout" Dec 05 06:11:57 crc kubenswrapper[4742]: I1205 06:11:57.920879 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" event={"ID":"9979f7f7-778c-47f3-8263-d3d93753e714","Type":"ContainerDied","Data":"a6a15ef699febe4ef315506f1e0c57f62da9176bd3f997cd38dfbe031a49d873"} Dec 05 06:11:57 crc kubenswrapper[4742]: I1205 06:11:57.921186 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6a15ef699febe4ef315506f1e0c57f62da9176bd3f997cd38dfbe031a49d873"
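
The barbican-db-sync failure above is the standard pull-failure progression: the CRI pull returns an error ("context canceled" while copying the image config), the sync records ErrImagePull, and the next sync attempt is parked in ImagePullBackOff ("Back-off pulling image ..."), with the delay growing on each failed retry. A small Go sketch of such an exponential back-off; the 10-second start and 5-minute cap match kubelet's commonly cited defaults but are assumptions here, not values read from this log:

    // Exponential back-off of the kind behind ImagePullBackOff: double the
    // wait after each failed pull, up to a cap.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        delay, maxDelay := 10*time.Second, 5*time.Minute
        for attempt := 1; attempt <= 7; attempt++ {
            fmt.Printf("pull attempt %d failed; next retry in %v\n", attempt, delay)
            delay *= 2
            if delay > maxDelay {
                delay = maxDelay
            }
        }
    }
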
containerID="6c6428d248edc5bf49d6ecdb41f4e3135ccfb37d799e38e42171af4f0f46c67b" Dec 05 06:11:57 crc kubenswrapper[4742]: E1205 06:11:57.947010 4742 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 05 06:11:57 crc kubenswrapper[4742]: E1205 06:11:57.947621 4742 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-l5vnp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-4svn8_openstack(b0730438-d5e9-48c6-b5d1-280b1fb0f4b1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 06:11:57 crc kubenswrapper[4742]: E1205 06:11:57.949113 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-4svn8" podUID="b0730438-d5e9-48c6-b5d1-280b1fb0f4b1" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.195927 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.233867 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-c87xk" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.302471 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-dns-swift-storage-0\") pod \"9979f7f7-778c-47f3-8263-d3d93753e714\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.302536 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-dns-svc\") pod \"9979f7f7-778c-47f3-8263-d3d93753e714\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.302586 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-ovsdbserver-sb\") pod \"9979f7f7-778c-47f3-8263-d3d93753e714\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.302662 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsw9v\" (UniqueName: \"kubernetes.io/projected/9979f7f7-778c-47f3-8263-d3d93753e714-kube-api-access-lsw9v\") pod \"9979f7f7-778c-47f3-8263-d3d93753e714\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.302706 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-ovsdbserver-nb\") pod \"9979f7f7-778c-47f3-8263-d3d93753e714\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.302774 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-config\") pod \"9979f7f7-778c-47f3-8263-d3d93753e714\" (UID: \"9979f7f7-778c-47f3-8263-d3d93753e714\") " Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.308161 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9979f7f7-778c-47f3-8263-d3d93753e714-kube-api-access-lsw9v" (OuterVolumeSpecName: "kube-api-access-lsw9v") pod "9979f7f7-778c-47f3-8263-d3d93753e714" (UID: "9979f7f7-778c-47f3-8263-d3d93753e714"). InnerVolumeSpecName "kube-api-access-lsw9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.373034 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9979f7f7-778c-47f3-8263-d3d93753e714" (UID: "9979f7f7-778c-47f3-8263-d3d93753e714"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.384424 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9979f7f7-778c-47f3-8263-d3d93753e714" (UID: "9979f7f7-778c-47f3-8263-d3d93753e714"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.396193 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9979f7f7-778c-47f3-8263-d3d93753e714" (UID: "9979f7f7-778c-47f3-8263-d3d93753e714"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.402569 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9979f7f7-778c-47f3-8263-d3d93753e714" (UID: "9979f7f7-778c-47f3-8263-d3d93753e714"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.404215 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9d56b0e1-cf28-4913-af18-4c13aafc539a-config\") pod \"9d56b0e1-cf28-4913-af18-4c13aafc539a\" (UID: \"9d56b0e1-cf28-4913-af18-4c13aafc539a\") " Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.404326 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxmzv\" (UniqueName: \"kubernetes.io/projected/9d56b0e1-cf28-4913-af18-4c13aafc539a-kube-api-access-cxmzv\") pod \"9d56b0e1-cf28-4913-af18-4c13aafc539a\" (UID: \"9d56b0e1-cf28-4913-af18-4c13aafc539a\") " Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.404443 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d56b0e1-cf28-4913-af18-4c13aafc539a-combined-ca-bundle\") pod \"9d56b0e1-cf28-4913-af18-4c13aafc539a\" (UID: \"9d56b0e1-cf28-4913-af18-4c13aafc539a\") " Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.404853 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.404873 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.404888 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsw9v\" (UniqueName: \"kubernetes.io/projected/9979f7f7-778c-47f3-8263-d3d93753e714-kube-api-access-lsw9v\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.404900 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:58 crc kubenswrapper[4742]: 
I1205 06:11:58.404911 4742 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.406912 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-config" (OuterVolumeSpecName: "config") pod "9979f7f7-778c-47f3-8263-d3d93753e714" (UID: "9979f7f7-778c-47f3-8263-d3d93753e714"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.409156 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d56b0e1-cf28-4913-af18-4c13aafc539a-kube-api-access-cxmzv" (OuterVolumeSpecName: "kube-api-access-cxmzv") pod "9d56b0e1-cf28-4913-af18-4c13aafc539a" (UID: "9d56b0e1-cf28-4913-af18-4c13aafc539a"). InnerVolumeSpecName "kube-api-access-cxmzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.419838 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.434990 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d56b0e1-cf28-4913-af18-4c13aafc539a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d56b0e1-cf28-4913-af18-4c13aafc539a" (UID: "9d56b0e1-cf28-4913-af18-4c13aafc539a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.435219 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d56b0e1-cf28-4913-af18-4c13aafc539a-config" (OuterVolumeSpecName: "config") pod "9d56b0e1-cf28-4913-af18-4c13aafc539a" (UID: "9d56b0e1-cf28-4913-af18-4c13aafc539a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:11:58 crc kubenswrapper[4742]: W1205 06:11:58.438164 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcdf66b2c_d07c_4ab7_a4b6_d4b1187ead13.slice/crio-aad08aa9c8a8bd46d5df4b31fe04a16fd58bf8242346cf57129141bfc379535e WatchSource:0}: Error finding container aad08aa9c8a8bd46d5df4b31fe04a16fd58bf8242346cf57129141bfc379535e: Status 404 returned error can't find the container with id aad08aa9c8a8bd46d5df4b31fe04a16fd58bf8242346cf57129141bfc379535e Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.477005 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-r2qnr"] Dec 05 06:11:58 crc kubenswrapper[4742]: W1205 06:11:58.482774 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e2f2b5a_8ced_49df_ae20_f64d13a9938b.slice/crio-4ae7ac540920f9b0156a982f5407566fd0a0de498fd371dd022c63f056484d48 WatchSource:0}: Error finding container 4ae7ac540920f9b0156a982f5407566fd0a0de498fd371dd022c63f056484d48: Status 404 returned error can't find the container with id 4ae7ac540920f9b0156a982f5407566fd0a0de498fd371dd022c63f056484d48 Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.506566 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d56b0e1-cf28-4913-af18-4c13aafc539a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.506606 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/9d56b0e1-cf28-4913-af18-4c13aafc539a-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.506622 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9979f7f7-778c-47f3-8263-d3d93753e714-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.506634 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxmzv\" (UniqueName: \"kubernetes.io/projected/9d56b0e1-cf28-4913-af18-4c13aafc539a-kube-api-access-cxmzv\") on node \"crc\" DevicePath \"\"" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.931681 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"2a0650cb5fb1ecf5b2a54d2428e362d6056d9793a00fef4de45d1cd9dff294dd"} Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.934320 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-r2qnr" event={"ID":"3e2f2b5a-8ced-49df-ae20-f64d13a9938b","Type":"ContainerStarted","Data":"3dfe6d954682eaace3f2556b9d454fccb07131b5cf542e9331587cb77a98c236"} Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.934356 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-r2qnr" event={"ID":"3e2f2b5a-8ced-49df-ae20-f64d13a9938b","Type":"ContainerStarted","Data":"4ae7ac540920f9b0156a982f5407566fd0a0de498fd371dd022c63f056484d48"} Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.937367 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-j7dqz" 
event={"ID":"4df39ed0-9850-4409-8648-724b15671640","Type":"ContainerStarted","Data":"5c49035a642271e66ce42bb0d1a45ebedab35110a82c05d221201aa774a0b6fc"} Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.940868 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"388e7953-5819-4978-842c-1cf54fd568c9","Type":"ContainerStarted","Data":"955cfd5120b0d6e13110d4d1a290597c0926bd70e91c83a93c339f051492b6ef"} Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.943144 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-c87xk" event={"ID":"9d56b0e1-cf28-4913-af18-4c13aafc539a","Type":"ContainerDied","Data":"71fd1d5ff137d414736b1d17e8ab2f4a36b8f2992eae0df07ed01b81e60d6f5d"} Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.943318 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71fd1d5ff137d414736b1d17e8ab2f4a36b8f2992eae0df07ed01b81e60d6f5d" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.943498 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-c87xk" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.965252 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13","Type":"ContainerStarted","Data":"aad08aa9c8a8bd46d5df4b31fe04a16fd58bf8242346cf57129141bfc379535e"} Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.968720 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-j7dqz" podStartSLOduration=2.291483029 podStartE2EDuration="23.968707264s" podCreationTimestamp="2025-12-05 06:11:35 +0000 UTC" firstStartedPulling="2025-12-05 06:11:36.26476037 +0000 UTC m=+1172.176895432" lastFinishedPulling="2025-12-05 06:11:57.941984605 +0000 UTC m=+1193.854119667" observedRunningTime="2025-12-05 06:11:58.965399455 +0000 UTC m=+1194.877534517" watchObservedRunningTime="2025-12-05 06:11:58.968707264 +0000 UTC m=+1194.880842326" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.973153 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.975116 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="bd351f0d-461e-4d10-9804-36f61fb0ed1d" containerName="glance-log" containerID="cri-o://82be5802cfde6ab8b8b021b37ae8197e7c5b5e2e4458917e6cf9e96c6446e0f0" gracePeriod=30 Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.975429 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="bd351f0d-461e-4d10-9804-36f61fb0ed1d" containerName="glance-httpd" containerID="cri-o://63eef1a4a2136716a5386ca02a4ae420c6a55dff906131f6301d2ce4e94ec10d" gracePeriod=30 Dec 05 06:11:58 crc kubenswrapper[4742]: I1205 06:11:58.975498 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bd351f0d-461e-4d10-9804-36f61fb0ed1d","Type":"ContainerStarted","Data":"63eef1a4a2136716a5386ca02a4ae420c6a55dff906131f6301d2ce4e94ec10d"} Dec 05 06:11:59 crc kubenswrapper[4742]: E1205 06:11:58.986009 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-4svn8" podUID="b0730438-d5e9-48c6-b5d1-280b1fb0f4b1" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.003540 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-r2qnr" podStartSLOduration=12.00351857 podStartE2EDuration="12.00351857s" podCreationTimestamp="2025-12-05 06:11:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:11:58.983780005 +0000 UTC m=+1194.895915077" watchObservedRunningTime="2025-12-05 06:11:59.00351857 +0000 UTC m=+1194.915653652" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.014296 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=25.014275117 podStartE2EDuration="25.014275117s" podCreationTimestamp="2025-12-05 06:11:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:11:59.003926991 +0000 UTC m=+1194.916062053" watchObservedRunningTime="2025-12-05 06:11:59.014275117 +0000 UTC m=+1194.926410189" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.065874 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-mlw4h"] Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.073567 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-mlw4h"] Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.554874 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-m9skl"] Dec 05 06:11:59 crc kubenswrapper[4742]: E1205 06:11:59.558040 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9979f7f7-778c-47f3-8263-d3d93753e714" containerName="init" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.559006 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="9979f7f7-778c-47f3-8263-d3d93753e714" containerName="init" Dec 05 06:11:59 crc kubenswrapper[4742]: E1205 
06:11:59.559158 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d56b0e1-cf28-4913-af18-4c13aafc539a" containerName="neutron-db-sync" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.559226 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d56b0e1-cf28-4913-af18-4c13aafc539a" containerName="neutron-db-sync" Dec 05 06:11:59 crc kubenswrapper[4742]: E1205 06:11:59.559297 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9979f7f7-778c-47f3-8263-d3d93753e714" containerName="dnsmasq-dns" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.559371 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="9979f7f7-778c-47f3-8263-d3d93753e714" containerName="dnsmasq-dns" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.559693 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d56b0e1-cf28-4913-af18-4c13aafc539a" containerName="neutron-db-sync" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.559799 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="9979f7f7-778c-47f3-8263-d3d93753e714" containerName="dnsmasq-dns" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.561041 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.571974 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-m9skl"] Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.652304 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.652378 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-dns-svc\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.652461 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.652547 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.652601 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-config\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: 
I1205 06:11:59.652669 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzwn6\" (UniqueName: \"kubernetes.io/projected/8620635a-6cc9-4c28-9a23-46017882bcb2-kube-api-access-dzwn6\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.681156 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7dcd478554-rrcm8"] Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.683034 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.690100 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-km54d" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.690643 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.690792 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.690852 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.698293 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7dcd478554-rrcm8"] Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.753988 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-httpd-config\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.754035 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-combined-ca-bundle\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.754086 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.754114 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-config\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.754150 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzwn6\" (UniqueName: \"kubernetes.io/projected/8620635a-6cc9-4c28-9a23-46017882bcb2-kube-api-access-dzwn6\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 
06:11:59.754170 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-ovndb-tls-certs\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.754217 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.754239 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-dns-svc\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.754261 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-config\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.754287 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfs88\" (UniqueName: \"kubernetes.io/projected/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-kube-api-access-hfs88\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.754307 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.755578 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-dns-svc\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.755651 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.755886 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-config\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.757293 4742 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.757716 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.771967 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzwn6\" (UniqueName: \"kubernetes.io/projected/8620635a-6cc9-4c28-9a23-46017882bcb2-kube-api-access-dzwn6\") pod \"dnsmasq-dns-6b7b667979-m9skl\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.858697 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-ovndb-tls-certs\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.858860 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-config\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.858916 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfs88\" (UniqueName: \"kubernetes.io/projected/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-kube-api-access-hfs88\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.858986 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-httpd-config\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.859022 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-combined-ca-bundle\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.864786 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-combined-ca-bundle\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.865439 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-ovndb-tls-certs\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.873382 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-config\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.877628 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-httpd-config\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.900955 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfs88\" (UniqueName: \"kubernetes.io/projected/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-kube-api-access-hfs88\") pod \"neutron-7dcd478554-rrcm8\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.905887 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.987258 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13","Type":"ContainerStarted","Data":"18773fd8cfb54fd920520afc01e72e5a3ec52859012030bd87061488e8603a77"} Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.991141 4742 generic.go:334] "Generic (PLEG): container finished" podID="bd351f0d-461e-4d10-9804-36f61fb0ed1d" containerID="63eef1a4a2136716a5386ca02a4ae420c6a55dff906131f6301d2ce4e94ec10d" exitCode=0 Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.991194 4742 generic.go:334] "Generic (PLEG): container finished" podID="bd351f0d-461e-4d10-9804-36f61fb0ed1d" containerID="82be5802cfde6ab8b8b021b37ae8197e7c5b5e2e4458917e6cf9e96c6446e0f0" exitCode=143 Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.991360 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bd351f0d-461e-4d10-9804-36f61fb0ed1d","Type":"ContainerDied","Data":"63eef1a4a2136716a5386ca02a4ae420c6a55dff906131f6301d2ce4e94ec10d"} Dec 05 06:11:59 crc kubenswrapper[4742]: I1205 06:11:59.991402 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bd351f0d-461e-4d10-9804-36f61fb0ed1d","Type":"ContainerDied","Data":"82be5802cfde6ab8b8b021b37ae8197e7c5b5e2e4458917e6cf9e96c6446e0f0"} Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.025386 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.399956 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9979f7f7-778c-47f3-8263-d3d93753e714" path="/var/lib/kubelet/pods/9979f7f7-778c-47f3-8263-d3d93753e714/volumes" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.452638 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.570720 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bd351f0d-461e-4d10-9804-36f61fb0ed1d-httpd-run\") pod \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.571346 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd351f0d-461e-4d10-9804-36f61fb0ed1d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "bd351f0d-461e-4d10-9804-36f61fb0ed1d" (UID: "bd351f0d-461e-4d10-9804-36f61fb0ed1d"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.571382 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-public-tls-certs\") pod \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.571603 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-config-data\") pod \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.571652 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd351f0d-461e-4d10-9804-36f61fb0ed1d-logs\") pod \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.571688 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-scripts\") pod \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.571734 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lc6n4\" (UniqueName: \"kubernetes.io/projected/bd351f0d-461e-4d10-9804-36f61fb0ed1d-kube-api-access-lc6n4\") pod \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.571781 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.571817 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-combined-ca-bundle\") pod \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\" (UID: \"bd351f0d-461e-4d10-9804-36f61fb0ed1d\") " Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.572228 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd351f0d-461e-4d10-9804-36f61fb0ed1d-logs" (OuterVolumeSpecName: "logs") pod "bd351f0d-461e-4d10-9804-36f61fb0ed1d" (UID: 
"bd351f0d-461e-4d10-9804-36f61fb0ed1d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.572245 4742 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/bd351f0d-461e-4d10-9804-36f61fb0ed1d-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.580788 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "bd351f0d-461e-4d10-9804-36f61fb0ed1d" (UID: "bd351f0d-461e-4d10-9804-36f61fb0ed1d"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.581519 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-scripts" (OuterVolumeSpecName: "scripts") pod "bd351f0d-461e-4d10-9804-36f61fb0ed1d" (UID: "bd351f0d-461e-4d10-9804-36f61fb0ed1d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.581771 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd351f0d-461e-4d10-9804-36f61fb0ed1d-kube-api-access-lc6n4" (OuterVolumeSpecName: "kube-api-access-lc6n4") pod "bd351f0d-461e-4d10-9804-36f61fb0ed1d" (UID: "bd351f0d-461e-4d10-9804-36f61fb0ed1d"). InnerVolumeSpecName "kube-api-access-lc6n4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.617145 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd351f0d-461e-4d10-9804-36f61fb0ed1d" (UID: "bd351f0d-461e-4d10-9804-36f61fb0ed1d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.642139 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "bd351f0d-461e-4d10-9804-36f61fb0ed1d" (UID: "bd351f0d-461e-4d10-9804-36f61fb0ed1d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.657279 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-config-data" (OuterVolumeSpecName: "config-data") pod "bd351f0d-461e-4d10-9804-36f61fb0ed1d" (UID: "bd351f0d-461e-4d10-9804-36f61fb0ed1d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.673266 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bd351f0d-461e-4d10-9804-36f61fb0ed1d-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.673296 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.673306 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lc6n4\" (UniqueName: \"kubernetes.io/projected/bd351f0d-461e-4d10-9804-36f61fb0ed1d-kube-api-access-lc6n4\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.673342 4742 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.673353 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.673362 4742 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.673370 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bd351f0d-461e-4d10-9804-36f61fb0ed1d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.696031 4742 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.774494 4742 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.935244 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-m9skl"] Dec 05 06:12:00 crc kubenswrapper[4742]: W1205 06:12:00.939901 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8620635a_6cc9_4c28_9a23_46017882bcb2.slice/crio-ebb7e4671e61f51a4f664e48658a0b047544c021ef12eb35cbe367a74d9bef05 WatchSource:0}: Error finding container ebb7e4671e61f51a4f664e48658a0b047544c021ef12eb35cbe367a74d9bef05: Status 404 returned error can't find the container with id ebb7e4671e61f51a4f664e48658a0b047544c021ef12eb35cbe367a74d9bef05 Dec 05 06:12:00 crc kubenswrapper[4742]: I1205 06:12:00.944358 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7dcd478554-rrcm8"] Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.004082 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13","Type":"ContainerStarted","Data":"6f7180aed7ceb51843eaabd0a032e872d10e578593ffbf206208c7908ccaad69"} Dec 05 
06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.008033 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"bd351f0d-461e-4d10-9804-36f61fb0ed1d","Type":"ContainerDied","Data":"81f40442d3559e270714681d060bd47dc1d4d02ddcd0c1b8e728293bf7ddb402"} Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.008145 4742 scope.go:117] "RemoveContainer" containerID="63eef1a4a2136716a5386ca02a4ae420c6a55dff906131f6301d2ce4e94ec10d" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.008244 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.022298 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-m9skl" event={"ID":"8620635a-6cc9-4c28-9a23-46017882bcb2","Type":"ContainerStarted","Data":"ebb7e4671e61f51a4f664e48658a0b047544c021ef12eb35cbe367a74d9bef05"} Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.023725 4742 generic.go:334] "Generic (PLEG): container finished" podID="4df39ed0-9850-4409-8648-724b15671640" containerID="5c49035a642271e66ce42bb0d1a45ebedab35110a82c05d221201aa774a0b6fc" exitCode=0 Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.023769 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-j7dqz" event={"ID":"4df39ed0-9850-4409-8648-724b15671640","Type":"ContainerDied","Data":"5c49035a642271e66ce42bb0d1a45ebedab35110a82c05d221201aa774a0b6fc"} Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.024602 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7dcd478554-rrcm8" event={"ID":"a1f68ebd-adfb-406e-ac16-d14599ea9bc3","Type":"ContainerStarted","Data":"193b4471666416cb77c41f8fc7cc0a723745a171e3bb205aed8ec6e4b68569c2"} Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.050233 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"388e7953-5819-4978-842c-1cf54fd568c9","Type":"ContainerStarted","Data":"4277d32a24ea03efda6807e804b77cafeebe7c67b2fec2c05c23aeeef287955d"} Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.077041 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=19.077024562 podStartE2EDuration="19.077024562s" podCreationTimestamp="2025-12-05 06:11:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:12:01.049413417 +0000 UTC m=+1196.961548479" watchObservedRunningTime="2025-12-05 06:12:01.077024562 +0000 UTC m=+1196.989159624" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.110258 4742 scope.go:117] "RemoveContainer" containerID="82be5802cfde6ab8b8b021b37ae8197e7c5b5e2e4458917e6cf9e96c6446e0f0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.160185 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.167476 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.176592 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:12:01 crc kubenswrapper[4742]: E1205 06:12:01.176882 4742 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="bd351f0d-461e-4d10-9804-36f61fb0ed1d" containerName="glance-log" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.176893 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd351f0d-461e-4d10-9804-36f61fb0ed1d" containerName="glance-log" Dec 05 06:12:01 crc kubenswrapper[4742]: E1205 06:12:01.176926 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd351f0d-461e-4d10-9804-36f61fb0ed1d" containerName="glance-httpd" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.176931 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd351f0d-461e-4d10-9804-36f61fb0ed1d" containerName="glance-httpd" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.177118 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd351f0d-461e-4d10-9804-36f61fb0ed1d" containerName="glance-httpd" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.177139 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd351f0d-461e-4d10-9804-36f61fb0ed1d" containerName="glance-log" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.177902 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.181160 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.182090 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.188559 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.390978 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-scripts\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.391313 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.391358 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a3450678-40d0-44f3-bcd6-c9d5b773812b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.391384 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3450678-40d0-44f3-bcd6-c9d5b773812b-logs\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.391409 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9f7d\" (UniqueName: 
\"kubernetes.io/projected/a3450678-40d0-44f3-bcd6-c9d5b773812b-kube-api-access-g9f7d\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.391448 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.391474 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.391493 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-config-data\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.493465 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a3450678-40d0-44f3-bcd6-c9d5b773812b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.493528 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3450678-40d0-44f3-bcd6-c9d5b773812b-logs\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.493560 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9f7d\" (UniqueName: \"kubernetes.io/projected/a3450678-40d0-44f3-bcd6-c9d5b773812b-kube-api-access-g9f7d\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.493602 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.493625 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.493653 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-config-data\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.493704 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-scripts\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.493744 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.494911 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3450678-40d0-44f3-bcd6-c9d5b773812b-logs\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.494948 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a3450678-40d0-44f3-bcd6-c9d5b773812b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.497960 4742 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.498409 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.498647 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.506371 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-config-data\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.509496 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-scripts\") pod \"glance-default-external-api-0\" (UID: 
\"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.522813 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9f7d\" (UniqueName: \"kubernetes.io/projected/a3450678-40d0-44f3-bcd6-c9d5b773812b-kube-api-access-g9f7d\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.526356 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " pod="openstack/glance-default-external-api-0" Dec 05 06:12:01 crc kubenswrapper[4742]: I1205 06:12:01.567691 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.072887 4742 generic.go:334] "Generic (PLEG): container finished" podID="8620635a-6cc9-4c28-9a23-46017882bcb2" containerID="2aafa85d6212561f15a268369138905fcc47bfc7c260a0f1db5a1bb422e74c99" exitCode=0 Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.073359 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-m9skl" event={"ID":"8620635a-6cc9-4c28-9a23-46017882bcb2","Type":"ContainerDied","Data":"2aafa85d6212561f15a268369138905fcc47bfc7c260a0f1db5a1bb422e74c99"} Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.093485 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7dcd478554-rrcm8" event={"ID":"a1f68ebd-adfb-406e-ac16-d14599ea9bc3","Type":"ContainerStarted","Data":"6ecbe3675537cc34ac2ba78f89c09921c117948ce41e6442e7c86331efe37a8e"} Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.093554 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7dcd478554-rrcm8" event={"ID":"a1f68ebd-adfb-406e-ac16-d14599ea9bc3","Type":"ContainerStarted","Data":"60e83da0e60acdbe1f32ff6eba4e9b83b35dd5e1aefbfa932805f3c182231fc8"} Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.094642 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.101564 4742 generic.go:334] "Generic (PLEG): container finished" podID="3e2f2b5a-8ced-49df-ae20-f64d13a9938b" containerID="3dfe6d954682eaace3f2556b9d454fccb07131b5cf542e9331587cb77a98c236" exitCode=0 Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.102681 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-r2qnr" event={"ID":"3e2f2b5a-8ced-49df-ae20-f64d13a9938b","Type":"ContainerDied","Data":"3dfe6d954682eaace3f2556b9d454fccb07131b5cf542e9331587cb77a98c236"} Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.102985 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f59b8f679-mlw4h" podUID="9979f7f7-778c-47f3-8263-d3d93753e714" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.128:5353: i/o timeout" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.145415 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-574f89688c-hbh7m"] Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.146955 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.158862 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7dcd478554-rrcm8" podStartSLOduration=3.158839368 podStartE2EDuration="3.158839368s" podCreationTimestamp="2025-12-05 06:11:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:12:02.131335596 +0000 UTC m=+1198.043470658" watchObservedRunningTime="2025-12-05 06:12:02.158839368 +0000 UTC m=+1198.070974430" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.163090 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.163251 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.168417 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-574f89688c-hbh7m"] Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.224896 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.271670 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw9n4\" (UniqueName: \"kubernetes.io/projected/65eece87-4279-4d6c-b2a6-5841fd5b3298-kube-api-access-qw9n4\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.271763 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-config\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.271822 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-ovndb-tls-certs\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.271840 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-public-tls-certs\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.271889 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-internal-tls-certs\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.271926 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-httpd-config\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.271992 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-combined-ca-bundle\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.373934 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw9n4\" (UniqueName: \"kubernetes.io/projected/65eece87-4279-4d6c-b2a6-5841fd5b3298-kube-api-access-qw9n4\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.373990 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-config\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.374017 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-public-tls-certs\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.374034 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-ovndb-tls-certs\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.374087 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-internal-tls-certs\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.374128 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-httpd-config\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.374168 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-combined-ca-bundle\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.378088 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-public-tls-certs\") pod \"neutron-574f89688c-hbh7m\" 
(UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.378430 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-httpd-config\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.381444 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-config\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.381940 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-ovndb-tls-certs\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.384068 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-internal-tls-certs\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.388137 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-combined-ca-bundle\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.391907 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw9n4\" (UniqueName: \"kubernetes.io/projected/65eece87-4279-4d6c-b2a6-5841fd5b3298-kube-api-access-qw9n4\") pod \"neutron-574f89688c-hbh7m\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.398590 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd351f0d-461e-4d10-9804-36f61fb0ed1d" path="/var/lib/kubelet/pods/bd351f0d-461e-4d10-9804-36f61fb0ed1d/volumes" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.421975 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-j7dqz" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.457919 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.457962 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.475042 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-config-data\") pod \"4df39ed0-9850-4409-8648-724b15671640\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.475181 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-scripts\") pod \"4df39ed0-9850-4409-8648-724b15671640\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.475213 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-combined-ca-bundle\") pod \"4df39ed0-9850-4409-8648-724b15671640\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.475236 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4df39ed0-9850-4409-8648-724b15671640-logs\") pod \"4df39ed0-9850-4409-8648-724b15671640\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.475262 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xh44w\" (UniqueName: \"kubernetes.io/projected/4df39ed0-9850-4409-8648-724b15671640-kube-api-access-xh44w\") pod \"4df39ed0-9850-4409-8648-724b15671640\" (UID: \"4df39ed0-9850-4409-8648-724b15671640\") " Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.476831 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4df39ed0-9850-4409-8648-724b15671640-logs" (OuterVolumeSpecName: "logs") pod "4df39ed0-9850-4409-8648-724b15671640" (UID: "4df39ed0-9850-4409-8648-724b15671640"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.481660 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4df39ed0-9850-4409-8648-724b15671640-kube-api-access-xh44w" (OuterVolumeSpecName: "kube-api-access-xh44w") pod "4df39ed0-9850-4409-8648-724b15671640" (UID: "4df39ed0-9850-4409-8648-724b15671640"). InnerVolumeSpecName "kube-api-access-xh44w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.487359 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-scripts" (OuterVolumeSpecName: "scripts") pod "4df39ed0-9850-4409-8648-724b15671640" (UID: "4df39ed0-9850-4409-8648-724b15671640"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.498218 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.506307 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4df39ed0-9850-4409-8648-724b15671640" (UID: "4df39ed0-9850-4409-8648-724b15671640"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.517166 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-config-data" (OuterVolumeSpecName: "config-data") pod "4df39ed0-9850-4409-8648-724b15671640" (UID: "4df39ed0-9850-4409-8648-724b15671640"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.521926 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.521983 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.576852 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.576882 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.576891 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4df39ed0-9850-4409-8648-724b15671640-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.576901 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4df39ed0-9850-4409-8648-724b15671640-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:02 crc kubenswrapper[4742]: I1205 06:12:02.576909 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xh44w\" (UniqueName: \"kubernetes.io/projected/4df39ed0-9850-4409-8648-724b15671640-kube-api-access-xh44w\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.127141 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-574f89688c-hbh7m"] Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.139164 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-m9skl" event={"ID":"8620635a-6cc9-4c28-9a23-46017882bcb2","Type":"ContainerStarted","Data":"d4b4d53a61d568d27cd31811531077ba4d61d36b10ce070641d0523f22a892e8"} Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.139219 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.148803 4742 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/placement-db-sync-j7dqz" event={"ID":"4df39ed0-9850-4409-8648-724b15671640","Type":"ContainerDied","Data":"c43440cb639fe912dee5837305ecde2a280633e7e48632e31f80581235c548ae"} Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.148844 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c43440cb639fe912dee5837305ecde2a280633e7e48632e31f80581235c548ae" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.148917 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-j7dqz" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.166080 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a3450678-40d0-44f3-bcd6-c9d5b773812b","Type":"ContainerStarted","Data":"911e97f444607219414c7206dea5e324e9af39fb5fc4cbb23881ea0ab567c41c"} Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.166257 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a3450678-40d0-44f3-bcd6-c9d5b773812b","Type":"ContainerStarted","Data":"dd6245598b0f9e8faf90c1422b43b1bf0fe55d37d9cee58ab9a62ae667550f33"} Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.166313 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.166657 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.188183 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b7b667979-m9skl" podStartSLOduration=4.188159156 podStartE2EDuration="4.188159156s" podCreationTimestamp="2025-12-05 06:11:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:12:03.163972062 +0000 UTC m=+1199.076107134" watchObservedRunningTime="2025-12-05 06:12:03.188159156 +0000 UTC m=+1199.100294218" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.209341 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-765b847d64-jgxg4"] Dec 05 06:12:03 crc kubenswrapper[4742]: E1205 06:12:03.209739 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4df39ed0-9850-4409-8648-724b15671640" containerName="placement-db-sync" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.209757 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="4df39ed0-9850-4409-8648-724b15671640" containerName="placement-db-sync" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.209908 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="4df39ed0-9850-4409-8648-724b15671640" containerName="placement-db-sync" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.210804 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.224392 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.224399 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.226698 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.230465 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-765b847d64-jgxg4"] Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.231331 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-992rv" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.231340 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.298912 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-public-tls-certs\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.300525 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwdc9\" (UniqueName: \"kubernetes.io/projected/e42757b3-029e-4fe9-917f-73331394524e-kube-api-access-cwdc9\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.300613 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-combined-ca-bundle\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.300693 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-config-data\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.300762 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-scripts\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.300850 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-internal-tls-certs\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 
06:12:03.301277 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e42757b3-029e-4fe9-917f-73331394524e-logs\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.402727 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-public-tls-certs\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.402998 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwdc9\" (UniqueName: \"kubernetes.io/projected/e42757b3-029e-4fe9-917f-73331394524e-kube-api-access-cwdc9\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.403036 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-combined-ca-bundle\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.403162 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-config-data\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.403182 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-scripts\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.403218 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-internal-tls-certs\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.403268 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e42757b3-029e-4fe9-917f-73331394524e-logs\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.403838 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e42757b3-029e-4fe9-917f-73331394524e-logs\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.407389 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-config-data\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.408191 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-combined-ca-bundle\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.408617 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-public-tls-certs\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.410431 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-internal-tls-certs\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.411336 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-scripts\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.421090 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwdc9\" (UniqueName: \"kubernetes.io/projected/e42757b3-029e-4fe9-917f-73331394524e-kube-api-access-cwdc9\") pod \"placement-765b847d64-jgxg4\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:03 crc kubenswrapper[4742]: I1205 06:12:03.562572 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:04 crc kubenswrapper[4742]: I1205 06:12:04.184687 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a3450678-40d0-44f3-bcd6-c9d5b773812b","Type":"ContainerStarted","Data":"d77cb7753d933ccde9dd454627520ab2dfcecd93ead766179bb9b76c07274bed"} Dec 05 06:12:04 crc kubenswrapper[4742]: I1205 06:12:04.186365 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-574f89688c-hbh7m" event={"ID":"65eece87-4279-4d6c-b2a6-5841fd5b3298","Type":"ContainerStarted","Data":"0f5251869335092c27ec478b7850c0b761a69b72c601c2774db88367eee9ef81"} Dec 05 06:12:04 crc kubenswrapper[4742]: I1205 06:12:04.186402 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-574f89688c-hbh7m" event={"ID":"65eece87-4279-4d6c-b2a6-5841fd5b3298","Type":"ContainerStarted","Data":"7dde166a73d5b76e2abad149fb208a207bbfb6c4d3f85860b86c5eececd4c718"} Dec 05 06:12:04 crc kubenswrapper[4742]: I1205 06:12:04.212920 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.212904562 podStartE2EDuration="3.212904562s" podCreationTimestamp="2025-12-05 06:12:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:12:04.203932823 +0000 UTC m=+1200.116067905" watchObservedRunningTime="2025-12-05 06:12:04.212904562 +0000 UTC m=+1200.125039614" Dec 05 06:12:05 crc kubenswrapper[4742]: I1205 06:12:05.986817 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 06:12:07 crc kubenswrapper[4742]: I1205 06:12:07.713127 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.246420 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-r2qnr" event={"ID":"3e2f2b5a-8ced-49df-ae20-f64d13a9938b","Type":"ContainerDied","Data":"4ae7ac540920f9b0156a982f5407566fd0a0de498fd371dd022c63f056484d48"} Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.246839 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ae7ac540920f9b0156a982f5407566fd0a0de498fd371dd022c63f056484d48" Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.427345 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.568473 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-765b847d64-jgxg4"] Dec 05 06:12:08 crc kubenswrapper[4742]: W1205 06:12:08.577266 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode42757b3_029e_4fe9_917f_73331394524e.slice/crio-7bb76af7c587ed7791faf68a07caf2574f40d34888b30121d2ce7c723a1dd3d6 WatchSource:0}: Error finding container 7bb76af7c587ed7791faf68a07caf2574f40d34888b30121d2ce7c723a1dd3d6: Status 404 returned error can't find the container with id 7bb76af7c587ed7791faf68a07caf2574f40d34888b30121d2ce7c723a1dd3d6 Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.593582 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-credential-keys\") pod \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.593960 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-combined-ca-bundle\") pod \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.593994 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-config-data\") pod \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.594017 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-scripts\") pod \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.594081 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-fernet-keys\") pod \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.594125 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gb9c7\" (UniqueName: \"kubernetes.io/projected/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-kube-api-access-gb9c7\") pod \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\" (UID: \"3e2f2b5a-8ced-49df-ae20-f64d13a9938b\") " Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.600908 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3e2f2b5a-8ced-49df-ae20-f64d13a9938b" (UID: "3e2f2b5a-8ced-49df-ae20-f64d13a9938b"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.600979 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3e2f2b5a-8ced-49df-ae20-f64d13a9938b" (UID: "3e2f2b5a-8ced-49df-ae20-f64d13a9938b"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.600969 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-scripts" (OuterVolumeSpecName: "scripts") pod "3e2f2b5a-8ced-49df-ae20-f64d13a9938b" (UID: "3e2f2b5a-8ced-49df-ae20-f64d13a9938b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.601589 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-kube-api-access-gb9c7" (OuterVolumeSpecName: "kube-api-access-gb9c7") pod "3e2f2b5a-8ced-49df-ae20-f64d13a9938b" (UID: "3e2f2b5a-8ced-49df-ae20-f64d13a9938b"). InnerVolumeSpecName "kube-api-access-gb9c7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.628453 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-config-data" (OuterVolumeSpecName: "config-data") pod "3e2f2b5a-8ced-49df-ae20-f64d13a9938b" (UID: "3e2f2b5a-8ced-49df-ae20-f64d13a9938b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.634149 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e2f2b5a-8ced-49df-ae20-f64d13a9938b" (UID: "3e2f2b5a-8ced-49df-ae20-f64d13a9938b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.696227 4742 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.696277 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.696683 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.696911 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.696950 4742 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:08 crc kubenswrapper[4742]: I1205 06:12:08.696969 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gb9c7\" (UniqueName: \"kubernetes.io/projected/3e2f2b5a-8ced-49df-ae20-f64d13a9938b-kube-api-access-gb9c7\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.258290 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-574f89688c-hbh7m" event={"ID":"65eece87-4279-4d6c-b2a6-5841fd5b3298","Type":"ContainerStarted","Data":"c3a0400780874ea1469dfe2dbe4742008a4997fce008a566e72b6e1f3a0d759f"} Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.258603 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.261367 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-765b847d64-jgxg4" event={"ID":"e42757b3-029e-4fe9-917f-73331394524e","Type":"ContainerStarted","Data":"87ced7f756fbe6fb669f5837d287cbfc896ceb71b65ba49a0991a9a56aa7f8a6"} Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.261426 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-765b847d64-jgxg4" event={"ID":"e42757b3-029e-4fe9-917f-73331394524e","Type":"ContainerStarted","Data":"23c49e25d43e79c7f8ade74991cb2aa015e0aae68fc08c3f8bd44099cbee5e4d"} Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.261453 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-765b847d64-jgxg4" event={"ID":"e42757b3-029e-4fe9-917f-73331394524e","Type":"ContainerStarted","Data":"7bb76af7c587ed7791faf68a07caf2574f40d34888b30121d2ce7c723a1dd3d6"} Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.261487 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.261511 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.264367 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-r2qnr" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.264413 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"388e7953-5819-4978-842c-1cf54fd568c9","Type":"ContainerStarted","Data":"d557b005e6d1d32413fda82b0019e6afea6e855c9c127c40dff0cb31227f0df8"} Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.294382 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-574f89688c-hbh7m" podStartSLOduration=7.294319696 podStartE2EDuration="7.294319696s" podCreationTimestamp="2025-12-05 06:12:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:12:09.28471623 +0000 UTC m=+1205.196851302" watchObservedRunningTime="2025-12-05 06:12:09.294319696 +0000 UTC m=+1205.206454788" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.314919 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-765b847d64-jgxg4" podStartSLOduration=6.314904164 podStartE2EDuration="6.314904164s" podCreationTimestamp="2025-12-05 06:12:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:12:09.307063776 +0000 UTC m=+1205.219198838" watchObservedRunningTime="2025-12-05 06:12:09.314904164 +0000 UTC m=+1205.227039226" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.537669 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-655b696477-tbv7n"] Dec 05 06:12:09 crc kubenswrapper[4742]: E1205 06:12:09.538142 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e2f2b5a-8ced-49df-ae20-f64d13a9938b" containerName="keystone-bootstrap" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.538162 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e2f2b5a-8ced-49df-ae20-f64d13a9938b" containerName="keystone-bootstrap" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.538313 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e2f2b5a-8ced-49df-ae20-f64d13a9938b" containerName="keystone-bootstrap" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.538866 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.540471 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.541161 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.541773 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.541979 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-dbvnt" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.542625 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.551547 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.577544 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-655b696477-tbv7n"] Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.722662 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-credential-keys\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.722728 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhcz5\" (UniqueName: \"kubernetes.io/projected/8d993905-0c76-454d-8eac-8a93674522db-kube-api-access-qhcz5\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.722760 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-scripts\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.722778 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-combined-ca-bundle\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.722808 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-fernet-keys\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.723123 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-internal-tls-certs\") pod \"keystone-655b696477-tbv7n\" (UID: 
\"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.723167 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-config-data\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.723222 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-public-tls-certs\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.824609 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-internal-tls-certs\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.824656 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-config-data\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.824688 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-public-tls-certs\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.824717 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-credential-keys\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.824748 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhcz5\" (UniqueName: \"kubernetes.io/projected/8d993905-0c76-454d-8eac-8a93674522db-kube-api-access-qhcz5\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.824775 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-scripts\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.824792 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-combined-ca-bundle\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " 
pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.824819 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-fernet-keys\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.831167 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-internal-tls-certs\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.831869 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-credential-keys\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.833021 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-config-data\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.833150 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-fernet-keys\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.833554 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-combined-ca-bundle\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.834221 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-public-tls-certs\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.841945 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-scripts\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.846393 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhcz5\" (UniqueName: \"kubernetes.io/projected/8d993905-0c76-454d-8eac-8a93674522db-kube-api-access-qhcz5\") pod \"keystone-655b696477-tbv7n\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") " pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.884588 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.907332 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.983678 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-6xnkh"] Dec 05 06:12:09 crc kubenswrapper[4742]: I1205 06:12:09.984224 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" podUID="d4b22256-5998-4055-acd4-6828f54186f8" containerName="dnsmasq-dns" containerID="cri-o://9eff3ddcc392c899c5f8d811e5f85f3a34be1babc52bc43ed880f88ea93b5636" gracePeriod=10 Dec 05 06:12:10 crc kubenswrapper[4742]: I1205 06:12:10.285119 4742 generic.go:334] "Generic (PLEG): container finished" podID="d4b22256-5998-4055-acd4-6828f54186f8" containerID="9eff3ddcc392c899c5f8d811e5f85f3a34be1babc52bc43ed880f88ea93b5636" exitCode=0 Dec 05 06:12:10 crc kubenswrapper[4742]: I1205 06:12:10.285176 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" event={"ID":"d4b22256-5998-4055-acd4-6828f54186f8","Type":"ContainerDied","Data":"9eff3ddcc392c899c5f8d811e5f85f3a34be1babc52bc43ed880f88ea93b5636"} Dec 05 06:12:10 crc kubenswrapper[4742]: I1205 06:12:10.286131 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-22vpx" event={"ID":"1e8bface-3ae8-4a16-85c0-eca434ca57f1","Type":"ContainerStarted","Data":"40e5259c068c72cb7a6feea8562a4bf25bb56c6f5b866eb1a774f17692a3f47c"} Dec 05 06:12:10 crc kubenswrapper[4742]: I1205 06:12:10.415073 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-22vpx" podStartSLOduration=3.6687207600000002 podStartE2EDuration="35.415041737s" podCreationTimestamp="2025-12-05 06:11:35 +0000 UTC" firstStartedPulling="2025-12-05 06:11:37.250845047 +0000 UTC m=+1173.162980109" lastFinishedPulling="2025-12-05 06:12:08.997166004 +0000 UTC m=+1204.909301086" observedRunningTime="2025-12-05 06:12:10.312389805 +0000 UTC m=+1206.224524877" watchObservedRunningTime="2025-12-05 06:12:10.415041737 +0000 UTC m=+1206.327176799" Dec 05 06:12:10 crc kubenswrapper[4742]: I1205 06:12:10.417284 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-655b696477-tbv7n"] Dec 05 06:12:10 crc kubenswrapper[4742]: I1205 06:12:10.636459 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" podUID="d4b22256-5998-4055-acd4-6828f54186f8" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.143:5353: connect: connection refused" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.306608 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-655b696477-tbv7n" event={"ID":"8d993905-0c76-454d-8eac-8a93674522db","Type":"ContainerStarted","Data":"19b3e95c718c3665ff0670d2d832d7d93791bf4dcc4a5e276fe859096cdb96f7"} Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.306892 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-655b696477-tbv7n" event={"ID":"8d993905-0c76-454d-8eac-8a93674522db","Type":"ContainerStarted","Data":"99f87f31d623370fcc74d863175c5b38300b8dcb1e298344617db859b13f9406"} Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.307787 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.325546 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-655b696477-tbv7n" podStartSLOduration=2.325529431 podStartE2EDuration="2.325529431s" podCreationTimestamp="2025-12-05 06:12:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:12:11.324891914 +0000 UTC m=+1207.237026976" watchObservedRunningTime="2025-12-05 06:12:11.325529431 +0000 UTC m=+1207.237664493" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.475852 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.560934 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqlb9\" (UniqueName: \"kubernetes.io/projected/d4b22256-5998-4055-acd4-6828f54186f8-kube-api-access-zqlb9\") pod \"d4b22256-5998-4055-acd4-6828f54186f8\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.561157 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-ovsdbserver-sb\") pod \"d4b22256-5998-4055-acd4-6828f54186f8\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.561184 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-ovsdbserver-nb\") pod \"d4b22256-5998-4055-acd4-6828f54186f8\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.561219 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-config\") pod \"d4b22256-5998-4055-acd4-6828f54186f8\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.561246 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-dns-svc\") pod \"d4b22256-5998-4055-acd4-6828f54186f8\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.561270 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-dns-swift-storage-0\") pod \"d4b22256-5998-4055-acd4-6828f54186f8\" (UID: \"d4b22256-5998-4055-acd4-6828f54186f8\") " Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.566659 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4b22256-5998-4055-acd4-6828f54186f8-kube-api-access-zqlb9" (OuterVolumeSpecName: "kube-api-access-zqlb9") pod "d4b22256-5998-4055-acd4-6828f54186f8" (UID: "d4b22256-5998-4055-acd4-6828f54186f8"). InnerVolumeSpecName "kube-api-access-zqlb9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.568846 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.568891 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.622500 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d4b22256-5998-4055-acd4-6828f54186f8" (UID: "d4b22256-5998-4055-acd4-6828f54186f8"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.623981 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d4b22256-5998-4055-acd4-6828f54186f8" (UID: "d4b22256-5998-4055-acd4-6828f54186f8"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.627233 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.640745 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.660513 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d4b22256-5998-4055-acd4-6828f54186f8" (UID: "d4b22256-5998-4055-acd4-6828f54186f8"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.663338 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-config" (OuterVolumeSpecName: "config") pod "d4b22256-5998-4055-acd4-6828f54186f8" (UID: "d4b22256-5998-4055-acd4-6828f54186f8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.665807 4742 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.665843 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqlb9\" (UniqueName: \"kubernetes.io/projected/d4b22256-5998-4055-acd4-6828f54186f8-kube-api-access-zqlb9\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.665854 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.665864 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.665873 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.696715 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d4b22256-5998-4055-acd4-6828f54186f8" (UID: "d4b22256-5998-4055-acd4-6828f54186f8"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:11 crc kubenswrapper[4742]: I1205 06:12:11.767297 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4b22256-5998-4055-acd4-6828f54186f8-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:12 crc kubenswrapper[4742]: I1205 06:12:12.319160 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" Dec 05 06:12:12 crc kubenswrapper[4742]: I1205 06:12:12.320317 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-6xnkh" event={"ID":"d4b22256-5998-4055-acd4-6828f54186f8","Type":"ContainerDied","Data":"d8dafcc2ab495847b39f6f4c197785bb978ee409a35eb660f86d0218720417a4"} Dec 05 06:12:12 crc kubenswrapper[4742]: I1205 06:12:12.320387 4742 scope.go:117] "RemoveContainer" containerID="9eff3ddcc392c899c5f8d811e5f85f3a34be1babc52bc43ed880f88ea93b5636" Dec 05 06:12:12 crc kubenswrapper[4742]: I1205 06:12:12.320660 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 06:12:12 crc kubenswrapper[4742]: I1205 06:12:12.320796 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 06:12:12 crc kubenswrapper[4742]: I1205 06:12:12.362508 4742 scope.go:117] "RemoveContainer" containerID="92030834cb232289e8d37dfec9a1c8db6df6dc07deb8302e24edfdfeb74517fa" Dec 05 06:12:12 crc kubenswrapper[4742]: I1205 06:12:12.382267 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-6xnkh"] Dec 05 06:12:12 crc kubenswrapper[4742]: I1205 06:12:12.405004 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-6xnkh"] Dec 05 06:12:13 crc kubenswrapper[4742]: E1205 06:12:13.019184 4742 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e8bface_3ae8_4a16_85c0_eca434ca57f1.slice/crio-conmon-40e5259c068c72cb7a6feea8562a4bf25bb56c6f5b866eb1a774f17692a3f47c.scope\": RecentStats: unable to find data in memory cache]" Dec 05 06:12:13 crc kubenswrapper[4742]: I1205 06:12:13.331358 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-4svn8" event={"ID":"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1","Type":"ContainerStarted","Data":"f581ecbf186f74f8dbb952b910932bfb53c37263797de4787a69cb0e9a0b066d"} Dec 05 06:12:13 crc kubenswrapper[4742]: I1205 06:12:13.339445 4742 generic.go:334] "Generic (PLEG): container finished" podID="1e8bface-3ae8-4a16-85c0-eca434ca57f1" containerID="40e5259c068c72cb7a6feea8562a4bf25bb56c6f5b866eb1a774f17692a3f47c" exitCode=0 Dec 05 06:12:13 crc kubenswrapper[4742]: I1205 06:12:13.339517 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-22vpx" event={"ID":"1e8bface-3ae8-4a16-85c0-eca434ca57f1","Type":"ContainerDied","Data":"40e5259c068c72cb7a6feea8562a4bf25bb56c6f5b866eb1a774f17692a3f47c"} Dec 05 06:12:13 crc kubenswrapper[4742]: I1205 06:12:13.374975 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-4svn8" podStartSLOduration=3.537758937 podStartE2EDuration="39.374958642s" podCreationTimestamp="2025-12-05 06:11:34 +0000 UTC" firstStartedPulling="2025-12-05 06:11:36.019595102 +0000 UTC m=+1171.931730164" lastFinishedPulling="2025-12-05 06:12:11.856794807 +0000 UTC m=+1207.768929869" observedRunningTime="2025-12-05 06:12:13.353288905 +0000 UTC m=+1209.265423967" watchObservedRunningTime="2025-12-05 06:12:13.374958642 +0000 UTC m=+1209.287093704" Dec 05 06:12:14 crc kubenswrapper[4742]: I1205 06:12:14.276736 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 06:12:14 crc 
kubenswrapper[4742]: I1205 06:12:14.280528 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 06:12:14 crc kubenswrapper[4742]: I1205 06:12:14.394582 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4b22256-5998-4055-acd4-6828f54186f8" path="/var/lib/kubelet/pods/d4b22256-5998-4055-acd4-6828f54186f8/volumes" Dec 05 06:12:16 crc kubenswrapper[4742]: I1205 06:12:16.832980 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-22vpx" Dec 05 06:12:16 crc kubenswrapper[4742]: I1205 06:12:16.966410 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e8bface-3ae8-4a16-85c0-eca434ca57f1-combined-ca-bundle\") pod \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\" (UID: \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\") " Dec 05 06:12:16 crc kubenswrapper[4742]: I1205 06:12:16.966541 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1e8bface-3ae8-4a16-85c0-eca434ca57f1-db-sync-config-data\") pod \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\" (UID: \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\") " Dec 05 06:12:16 crc kubenswrapper[4742]: I1205 06:12:16.966576 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2cdd\" (UniqueName: \"kubernetes.io/projected/1e8bface-3ae8-4a16-85c0-eca434ca57f1-kube-api-access-q2cdd\") pod \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\" (UID: \"1e8bface-3ae8-4a16-85c0-eca434ca57f1\") " Dec 05 06:12:16 crc kubenswrapper[4742]: I1205 06:12:16.972199 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e8bface-3ae8-4a16-85c0-eca434ca57f1-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "1e8bface-3ae8-4a16-85c0-eca434ca57f1" (UID: "1e8bface-3ae8-4a16-85c0-eca434ca57f1"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:16 crc kubenswrapper[4742]: I1205 06:12:16.975376 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e8bface-3ae8-4a16-85c0-eca434ca57f1-kube-api-access-q2cdd" (OuterVolumeSpecName: "kube-api-access-q2cdd") pod "1e8bface-3ae8-4a16-85c0-eca434ca57f1" (UID: "1e8bface-3ae8-4a16-85c0-eca434ca57f1"). InnerVolumeSpecName "kube-api-access-q2cdd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:12:17 crc kubenswrapper[4742]: I1205 06:12:17.013184 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e8bface-3ae8-4a16-85c0-eca434ca57f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1e8bface-3ae8-4a16-85c0-eca434ca57f1" (UID: "1e8bface-3ae8-4a16-85c0-eca434ca57f1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:17 crc kubenswrapper[4742]: I1205 06:12:17.068808 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e8bface-3ae8-4a16-85c0-eca434ca57f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:17 crc kubenswrapper[4742]: I1205 06:12:17.068862 4742 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/1e8bface-3ae8-4a16-85c0-eca434ca57f1-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:17 crc kubenswrapper[4742]: I1205 06:12:17.068876 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2cdd\" (UniqueName: \"kubernetes.io/projected/1e8bface-3ae8-4a16-85c0-eca434ca57f1-kube-api-access-q2cdd\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:17 crc kubenswrapper[4742]: I1205 06:12:17.379317 4742 generic.go:334] "Generic (PLEG): container finished" podID="b0730438-d5e9-48c6-b5d1-280b1fb0f4b1" containerID="f581ecbf186f74f8dbb952b910932bfb53c37263797de4787a69cb0e9a0b066d" exitCode=0 Dec 05 06:12:17 crc kubenswrapper[4742]: I1205 06:12:17.379454 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-4svn8" event={"ID":"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1","Type":"ContainerDied","Data":"f581ecbf186f74f8dbb952b910932bfb53c37263797de4787a69cb0e9a0b066d"} Dec 05 06:12:17 crc kubenswrapper[4742]: I1205 06:12:17.381426 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-22vpx" event={"ID":"1e8bface-3ae8-4a16-85c0-eca434ca57f1","Type":"ContainerDied","Data":"8b0c8c1bcc964a26b542b7911e1d3db6cc9db4ad3d64c63024775c7227922062"} Dec 05 06:12:17 crc kubenswrapper[4742]: I1205 06:12:17.381464 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-22vpx" Dec 05 06:12:17 crc kubenswrapper[4742]: I1205 06:12:17.381470 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b0c8c1bcc964a26b542b7911e1d3db6cc9db4ad3d64c63024775c7227922062" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.090321 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-66f7f988b5-b5pzf"] Dec 05 06:12:18 crc kubenswrapper[4742]: E1205 06:12:18.090730 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4b22256-5998-4055-acd4-6828f54186f8" containerName="dnsmasq-dns" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.090742 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4b22256-5998-4055-acd4-6828f54186f8" containerName="dnsmasq-dns" Dec 05 06:12:18 crc kubenswrapper[4742]: E1205 06:12:18.090755 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e8bface-3ae8-4a16-85c0-eca434ca57f1" containerName="barbican-db-sync" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.090760 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e8bface-3ae8-4a16-85c0-eca434ca57f1" containerName="barbican-db-sync" Dec 05 06:12:18 crc kubenswrapper[4742]: E1205 06:12:18.090771 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4b22256-5998-4055-acd4-6828f54186f8" containerName="init" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.090776 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4b22256-5998-4055-acd4-6828f54186f8" containerName="init" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.090942 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e8bface-3ae8-4a16-85c0-eca434ca57f1" containerName="barbican-db-sync" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.090958 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4b22256-5998-4055-acd4-6828f54186f8" containerName="dnsmasq-dns" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.091854 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-66f7f988b5-b5pzf" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.096508 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-7rss2" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.096778 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.097003 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.115039 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7bd94f978b-h9cm5"] Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.116978 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.122635 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.128815 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-66f7f988b5-b5pzf"] Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.139120 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7bd94f978b-h9cm5"] Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.170700 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-tw5cz"] Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.172570 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.188175 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-logs\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.188254 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-combined-ca-bundle\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.188294 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-config-data-custom\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.188320 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-combined-ca-bundle\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.188409 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c289g\" (UniqueName: \"kubernetes.io/projected/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-kube-api-access-c289g\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.188452 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-config-data\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf" Dec 05 
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.188474 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56xlt\" (UniqueName: \"kubernetes.io/projected/fa30e851-f383-42c0-9e09-d8c896ed77ad-kube-api-access-56xlt\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.188531 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa30e851-f383-42c0-9e09-d8c896ed77ad-logs\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.188558 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-config-data-custom\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.188605 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-config-data\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.204759 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-tw5cz"]
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.290929 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c289g\" (UniqueName: \"kubernetes.io/projected/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-kube-api-access-c289g\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.290981 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.291021 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-config-data\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.291039 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56xlt\" (UniqueName: \"kubernetes.io/projected/fa30e851-f383-42c0-9e09-d8c896ed77ad-kube-api-access-56xlt\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.291096 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa30e851-f383-42c0-9e09-d8c896ed77ad-logs\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.291117 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-config-data-custom\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.291135 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.291167 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-config-data\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.291194 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.291210 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-logs\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.291241 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-combined-ca-bundle\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.291256 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-config-data-custom\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.291271 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-combined-ca-bundle\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.291299 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.291331 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfnrq\" (UniqueName: \"kubernetes.io/projected/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-kube-api-access-cfnrq\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.291348 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-config\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.292773 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-logs\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.293250 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa30e851-f383-42c0-9e09-d8c896ed77ad-logs\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.297769 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-config-data-custom\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.299025 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-config-data\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.299264 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-combined-ca-bundle\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.299716 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-combined-ca-bundle\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.300122 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6bd586bb98-kn6cb"]
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.305817 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-config-data\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.306466 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6bd586bb98-kn6cb"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.307861 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-config-data-custom\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.308811 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.318929 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56xlt\" (UniqueName: \"kubernetes.io/projected/fa30e851-f383-42c0-9e09-d8c896ed77ad-kube-api-access-56xlt\") pod \"barbican-worker-66f7f988b5-b5pzf\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " pod="openstack/barbican-worker-66f7f988b5-b5pzf"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.327219 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6bd586bb98-kn6cb"]
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.330213 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c289g\" (UniqueName: \"kubernetes.io/projected/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-kube-api-access-c289g\") pod \"barbican-keystone-listener-7bd94f978b-h9cm5\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.392498 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-config-data-custom\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.392562 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz"
Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.392607 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1170877-6a94-478f-b6de-75fc8dd2c13e-logs\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb"
pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.392646 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.392709 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfnrq\" (UniqueName: \"kubernetes.io/projected/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-kube-api-access-cfnrq\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.392734 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-config\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.392872 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.392893 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-config-data\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.392930 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-combined-ca-bundle\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.392966 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.392987 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85qqq\" (UniqueName: \"kubernetes.io/projected/e1170877-6a94-478f-b6de-75fc8dd2c13e-kube-api-access-85qqq\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.393817 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: 
\"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.394389 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.394993 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.395672 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-config\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.396022 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.416202 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-66f7f988b5-b5pzf" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.416633 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfnrq\" (UniqueName: \"kubernetes.io/projected/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-kube-api-access-cfnrq\") pod \"dnsmasq-dns-848cf88cfc-tw5cz\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.436972 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.493903 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85qqq\" (UniqueName: \"kubernetes.io/projected/e1170877-6a94-478f-b6de-75fc8dd2c13e-kube-api-access-85qqq\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.493960 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-config-data-custom\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.494003 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1170877-6a94-478f-b6de-75fc8dd2c13e-logs\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.494081 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-config-data\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.494115 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-combined-ca-bundle\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.496599 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.498310 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1170877-6a94-478f-b6de-75fc8dd2c13e-logs\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.498513 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-config-data-custom\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.502882 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-combined-ca-bundle\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.505458 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-config-data\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.518828 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85qqq\" (UniqueName: \"kubernetes.io/projected/e1170877-6a94-478f-b6de-75fc8dd2c13e-kube-api-access-85qqq\") pod \"barbican-api-6bd586bb98-kn6cb\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.689488 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.780894 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-4svn8" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.902613 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5vnp\" (UniqueName: \"kubernetes.io/projected/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-kube-api-access-l5vnp\") pod \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.902688 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-combined-ca-bundle\") pod \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.902711 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-db-sync-config-data\") pod \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.902770 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-scripts\") pod \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.902843 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-etc-machine-id\") pod \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.902876 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-config-data\") pod \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\" (UID: \"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1\") " Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.912997 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b0730438-d5e9-48c6-b5d1-280b1fb0f4b1" (UID: "b0730438-d5e9-48c6-b5d1-280b1fb0f4b1"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.917655 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-kube-api-access-l5vnp" (OuterVolumeSpecName: "kube-api-access-l5vnp") pod "b0730438-d5e9-48c6-b5d1-280b1fb0f4b1" (UID: "b0730438-d5e9-48c6-b5d1-280b1fb0f4b1"). InnerVolumeSpecName "kube-api-access-l5vnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.917727 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-scripts" (OuterVolumeSpecName: "scripts") pod "b0730438-d5e9-48c6-b5d1-280b1fb0f4b1" (UID: "b0730438-d5e9-48c6-b5d1-280b1fb0f4b1"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.917761 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b0730438-d5e9-48c6-b5d1-280b1fb0f4b1" (UID: "b0730438-d5e9-48c6-b5d1-280b1fb0f4b1"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.945002 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b0730438-d5e9-48c6-b5d1-280b1fb0f4b1" (UID: "b0730438-d5e9-48c6-b5d1-280b1fb0f4b1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:18 crc kubenswrapper[4742]: I1205 06:12:18.977893 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-config-data" (OuterVolumeSpecName: "config-data") pod "b0730438-d5e9-48c6-b5d1-280b1fb0f4b1" (UID: "b0730438-d5e9-48c6-b5d1-280b1fb0f4b1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.005146 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.005235 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5vnp\" (UniqueName: \"kubernetes.io/projected/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-kube-api-access-l5vnp\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.005247 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.005257 4742 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.005265 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.005273 4742 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.404454 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6bd586bb98-kn6cb"] Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.410457 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"388e7953-5819-4978-842c-1cf54fd568c9","Type":"ContainerStarted","Data":"d012bb303825e51fab5876490727b6cbc42ec9194656ed2dbf2941cf5a0d71fa"} Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.410679 4742 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/ceilometer-0" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="ceilometer-central-agent" containerID="cri-o://955cfd5120b0d6e13110d4d1a290597c0926bd70e91c83a93c339f051492b6ef" gracePeriod=30 Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.410773 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.411183 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="proxy-httpd" containerID="cri-o://d012bb303825e51fab5876490727b6cbc42ec9194656ed2dbf2941cf5a0d71fa" gracePeriod=30 Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.411240 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="sg-core" containerID="cri-o://d557b005e6d1d32413fda82b0019e6afea6e855c9c127c40dff0cb31227f0df8" gracePeriod=30 Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.411282 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="ceilometer-notification-agent" containerID="cri-o://4277d32a24ea03efda6807e804b77cafeebe7c67b2fec2c05c23aeeef287955d" gracePeriod=30 Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.430440 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-4svn8" event={"ID":"b0730438-d5e9-48c6-b5d1-280b1fb0f4b1","Type":"ContainerDied","Data":"e011265f600e54f0669f1087eb17e1e382aede2fbf661e342e9186c867e2fa87"} Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.430481 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e011265f600e54f0669f1087eb17e1e382aede2fbf661e342e9186c867e2fa87" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.430537 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-4svn8" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.449290 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.7970581129999998 podStartE2EDuration="44.449270474s" podCreationTimestamp="2025-12-05 06:11:35 +0000 UTC" firstStartedPulling="2025-12-05 06:11:36.254809715 +0000 UTC m=+1172.166944777" lastFinishedPulling="2025-12-05 06:12:18.907022076 +0000 UTC m=+1214.819157138" observedRunningTime="2025-12-05 06:12:19.432784395 +0000 UTC m=+1215.344919457" watchObservedRunningTime="2025-12-05 06:12:19.449270474 +0000 UTC m=+1215.361405536" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.479989 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-tw5cz"] Dec 05 06:12:19 crc kubenswrapper[4742]: W1205 06:12:19.486803 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0f4d3ad_2d27_4c1c_b3e5_a81395d2f73d.slice/crio-9e0dbec906230bafc7535945a0c5e04429073020ab4f1c72eed9b1098650e4e3 WatchSource:0}: Error finding container 9e0dbec906230bafc7535945a0c5e04429073020ab4f1c72eed9b1098650e4e3: Status 404 returned error can't find the container with id 9e0dbec906230bafc7535945a0c5e04429073020ab4f1c72eed9b1098650e4e3 Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.489300 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7bd94f978b-h9cm5"] Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.615703 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-66f7f988b5-b5pzf"] Dec 05 06:12:19 crc kubenswrapper[4742]: W1205 06:12:19.637173 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa30e851_f383_42c0_9e09_d8c896ed77ad.slice/crio-399de0c84bd1ba34d951be5e96866f611d5d76be3f684cfa27b1c3935f262f81 WatchSource:0}: Error finding container 399de0c84bd1ba34d951be5e96866f611d5d76be3f684cfa27b1c3935f262f81: Status 404 returned error can't find the container with id 399de0c84bd1ba34d951be5e96866f611d5d76be3f684cfa27b1c3935f262f81 Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.702200 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 06:12:19 crc kubenswrapper[4742]: E1205 06:12:19.702634 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0730438-d5e9-48c6-b5d1-280b1fb0f4b1" containerName="cinder-db-sync" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.702655 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0730438-d5e9-48c6-b5d1-280b1fb0f4b1" containerName="cinder-db-sync" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.702843 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0730438-d5e9-48c6-b5d1-280b1fb0f4b1" containerName="cinder-db-sync" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.704876 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.715719 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-5rhld" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.715956 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.716256 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.716407 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.753795 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.781502 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-tw5cz"] Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.823649 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-46pxh"] Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.825590 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.832864 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-46pxh"] Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.833515 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.833606 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-scripts\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.833689 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-config-data\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.833794 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.833916 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8xf9\" (UniqueName: \"kubernetes.io/projected/acb321ed-2f39-4c68-bafd-1eabb744fee9-kube-api-access-g8xf9\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 
06:12:19.834010 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/acb321ed-2f39-4c68-bafd-1eabb744fee9-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.895115 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.897520 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.910499 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.920133 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.935921 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-config-data\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.936142 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.936227 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.936316 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8xf9\" (UniqueName: \"kubernetes.io/projected/acb321ed-2f39-4c68-bafd-1eabb744fee9-kube-api-access-g8xf9\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.936417 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/acb321ed-2f39-4c68-bafd-1eabb744fee9-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.936530 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-config\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.936616 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-dns-svc\") pod 
\"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.936686 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.936779 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.936845 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.936906 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z49bc\" (UniqueName: \"kubernetes.io/projected/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-kube-api-access-z49bc\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.936979 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-scripts\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.941517 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-scripts\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.942049 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/acb321ed-2f39-4c68-bafd-1eabb744fee9-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.951258 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-config-data\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.953867 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc 
kubenswrapper[4742]: I1205 06:12:19.968214 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:19 crc kubenswrapper[4742]: I1205 06:12:19.971418 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8xf9\" (UniqueName: \"kubernetes.io/projected/acb321ed-2f39-4c68-bafd-1eabb744fee9-kube-api-access-g8xf9\") pod \"cinder-scheduler-0\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.039297 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.039338 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z49bc\" (UniqueName: \"kubernetes.io/projected/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-kube-api-access-z49bc\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.039383 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-config-data-custom\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.039417 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.039437 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-logs\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.039458 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.039506 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.039524 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-config-data\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.039552 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-config\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.039583 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-dns-svc\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.039605 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.039622 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-scripts\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.039641 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c55q9\" (UniqueName: \"kubernetes.io/projected/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-kube-api-access-c55q9\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.041944 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.042505 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-dns-svc\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.043010 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-config\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.043169 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: 
\"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.044212 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.054101 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.064136 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z49bc\" (UniqueName: \"kubernetes.io/projected/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-kube-api-access-z49bc\") pod \"dnsmasq-dns-6578955fd5-46pxh\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.141383 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-config-data-custom\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.141649 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.141697 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-logs\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.141743 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.142139 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-logs\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.142204 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.142220 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-config-data\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.142276 4742 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-scripts\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.142297 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c55q9\" (UniqueName: \"kubernetes.io/projected/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-kube-api-access-c55q9\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.145683 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-config-data-custom\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.145839 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-config-data\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.146608 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.146925 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-scripts\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.149563 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.170727 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c55q9\" (UniqueName: \"kubernetes.io/projected/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-kube-api-access-c55q9\") pod \"cinder-api-0\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.252859 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.459756 4742 generic.go:334] "Generic (PLEG): container finished" podID="388e7953-5819-4978-842c-1cf54fd568c9" containerID="d012bb303825e51fab5876490727b6cbc42ec9194656ed2dbf2941cf5a0d71fa" exitCode=0 Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.460007 4742 generic.go:334] "Generic (PLEG): container finished" podID="388e7953-5819-4978-842c-1cf54fd568c9" containerID="d557b005e6d1d32413fda82b0019e6afea6e855c9c127c40dff0cb31227f0df8" exitCode=2 Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.460016 4742 generic.go:334] "Generic (PLEG): container finished" podID="388e7953-5819-4978-842c-1cf54fd568c9" containerID="955cfd5120b0d6e13110d4d1a290597c0926bd70e91c83a93c339f051492b6ef" exitCode=0 Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.459908 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"388e7953-5819-4978-842c-1cf54fd568c9","Type":"ContainerDied","Data":"d012bb303825e51fab5876490727b6cbc42ec9194656ed2dbf2941cf5a0d71fa"} Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.460089 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"388e7953-5819-4978-842c-1cf54fd568c9","Type":"ContainerDied","Data":"d557b005e6d1d32413fda82b0019e6afea6e855c9c127c40dff0cb31227f0df8"} Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.460104 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"388e7953-5819-4978-842c-1cf54fd568c9","Type":"ContainerDied","Data":"955cfd5120b0d6e13110d4d1a290597c0926bd70e91c83a93c339f051492b6ef"} Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.463031 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-66f7f988b5-b5pzf" event={"ID":"fa30e851-f383-42c0-9e09-d8c896ed77ad","Type":"ContainerStarted","Data":"399de0c84bd1ba34d951be5e96866f611d5d76be3f684cfa27b1c3935f262f81"} Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.465235 4742 generic.go:334] "Generic (PLEG): container finished" podID="d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d" containerID="2c94142218935753c6c9d209e2c482dac33a4509013d87810287db204bb399e6" exitCode=0 Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.465287 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" event={"ID":"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d","Type":"ContainerDied","Data":"2c94142218935753c6c9d209e2c482dac33a4509013d87810287db204bb399e6"} Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.465315 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" event={"ID":"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d","Type":"ContainerStarted","Data":"9e0dbec906230bafc7535945a0c5e04429073020ab4f1c72eed9b1098650e4e3"} Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.503739 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6bd586bb98-kn6cb" event={"ID":"e1170877-6a94-478f-b6de-75fc8dd2c13e","Type":"ContainerStarted","Data":"a014a2ad7cc327d9d56678a678354339e1837cc39922551432492a1b6675b135"} Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.503789 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6bd586bb98-kn6cb" event={"ID":"e1170877-6a94-478f-b6de-75fc8dd2c13e","Type":"ContainerStarted","Data":"8682ebf1f92c0afc545cd78f7740e9bdde156899892d295e085f0160519a75a6"} Dec 05 
06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.504503 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6bd586bb98-kn6cb" event={"ID":"e1170877-6a94-478f-b6de-75fc8dd2c13e","Type":"ContainerStarted","Data":"3901ba2ce30d243e43fcc74f982ad55c93401d8ddeb92a456fb9865c26af1f5a"} Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.507243 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.507293 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.519364 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" event={"ID":"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9","Type":"ContainerStarted","Data":"1d7c4f5e9ea46946dded5d22e0e6596417c3c84bd505c0da508bce48c06d7f74"} Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.545934 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6bd586bb98-kn6cb" podStartSLOduration=2.545908726 podStartE2EDuration="2.545908726s" podCreationTimestamp="2025-12-05 06:12:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:12:20.531580344 +0000 UTC m=+1216.443715436" watchObservedRunningTime="2025-12-05 06:12:20.545908726 +0000 UTC m=+1216.458043798" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.662123 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 06:12:20 crc kubenswrapper[4742]: E1205 06:12:20.758297 4742 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 05 06:12:20 crc kubenswrapper[4742]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 05 06:12:20 crc kubenswrapper[4742]: > podSandboxID="9e0dbec906230bafc7535945a0c5e04429073020ab4f1c72eed9b1098650e4e3" Dec 05 06:12:20 crc kubenswrapper[4742]: E1205 06:12:20.758468 4742 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 05 06:12:20 crc kubenswrapper[4742]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n99h8bhd9h696h649h588h5c6h658h5b4h57fh65h89h5f5h56h696h5dh8h57h597h68ch568h58dh66hf4h675h598h588h67dhb5h69h5dh6bq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-swift-storage-0,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-swift-storage-0,SubPath:dns-swift-storage-0,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cfnrq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-848cf88cfc-tw5cz_openstack(d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 05 06:12:20 crc kubenswrapper[4742]: > logger="UnhandledError" Dec 05 06:12:20 crc kubenswrapper[4742]: E1205 06:12:20.759750 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" 
podUID="d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d" Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.771000 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-46pxh"] Dec 05 06:12:20 crc kubenswrapper[4742]: I1205 06:12:20.885380 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 06:12:21 crc kubenswrapper[4742]: W1205 06:12:21.407345 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0352ff8_ed72_463c_a7b8_3e69e23a3ea2.slice/crio-1347c934f24ff35b4b873199618519b4ccd8b96b0fa2279e16092c71d3302806 WatchSource:0}: Error finding container 1347c934f24ff35b4b873199618519b4ccd8b96b0fa2279e16092c71d3302806: Status 404 returned error can't find the container with id 1347c934f24ff35b4b873199618519b4ccd8b96b0fa2279e16092c71d3302806 Dec 05 06:12:21 crc kubenswrapper[4742]: W1205 06:12:21.411820 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab7867d7_6a6b_476e_aadf_ecf260aa91a2.slice/crio-6b6ecb4b61b8c1e971fe1341205179ff9e50424ecad8e2a72b9942e209c9ac0a WatchSource:0}: Error finding container 6b6ecb4b61b8c1e971fe1341205179ff9e50424ecad8e2a72b9942e209c9ac0a: Status 404 returned error can't find the container with id 6b6ecb4b61b8c1e971fe1341205179ff9e50424ecad8e2a72b9942e209c9ac0a Dec 05 06:12:21 crc kubenswrapper[4742]: I1205 06:12:21.545982 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-46pxh" event={"ID":"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2","Type":"ContainerStarted","Data":"1347c934f24ff35b4b873199618519b4ccd8b96b0fa2279e16092c71d3302806"} Dec 05 06:12:21 crc kubenswrapper[4742]: I1205 06:12:21.547612 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ab7867d7-6a6b-476e-aadf-ecf260aa91a2","Type":"ContainerStarted","Data":"6b6ecb4b61b8c1e971fe1341205179ff9e50424ecad8e2a72b9942e209c9ac0a"} Dec 05 06:12:21 crc kubenswrapper[4742]: I1205 06:12:21.552642 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"acb321ed-2f39-4c68-bafd-1eabb744fee9","Type":"ContainerStarted","Data":"e38bd9817dfd597dfd55a46701fe70ba64789aa4ee3f505015bfea857a70a312"} Dec 05 06:12:21 crc kubenswrapper[4742]: I1205 06:12:21.864895 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:21 crc kubenswrapper[4742]: I1205 06:12:21.977414 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-dns-swift-storage-0\") pod \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " Dec 05 06:12:21 crc kubenswrapper[4742]: I1205 06:12:21.977469 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-ovsdbserver-sb\") pod \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " Dec 05 06:12:21 crc kubenswrapper[4742]: I1205 06:12:21.977517 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfnrq\" (UniqueName: \"kubernetes.io/projected/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-kube-api-access-cfnrq\") pod \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " Dec 05 06:12:21 crc kubenswrapper[4742]: I1205 06:12:21.977596 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-config\") pod \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " Dec 05 06:12:21 crc kubenswrapper[4742]: I1205 06:12:21.977645 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-dns-svc\") pod \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " Dec 05 06:12:21 crc kubenswrapper[4742]: I1205 06:12:21.977738 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-ovsdbserver-nb\") pod \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\" (UID: \"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d\") " Dec 05 06:12:21 crc kubenswrapper[4742]: I1205 06:12:21.983206 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-kube-api-access-cfnrq" (OuterVolumeSpecName: "kube-api-access-cfnrq") pod "d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d" (UID: "d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d"). InnerVolumeSpecName "kube-api-access-cfnrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.068189 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-config" (OuterVolumeSpecName: "config") pod "d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d" (UID: "d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.075550 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d" (UID: "d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.078664 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d" (UID: "d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.079730 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfnrq\" (UniqueName: \"kubernetes.io/projected/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-kube-api-access-cfnrq\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.079750 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.079760 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.079771 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.081935 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d" (UID: "d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.105687 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d" (UID: "d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.180951 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.180980 4742 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.565837 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" event={"ID":"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9","Type":"ContainerStarted","Data":"2f4156cfc6ee4ae12fa0ce4c17f20f7d82287d71fe307700ba39947bbecf3c02"} Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.566111 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" event={"ID":"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9","Type":"ContainerStarted","Data":"3710b941ebb6d9eff059006febf49493e12500d9cc0dc124f356193d93849a0b"} Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.568075 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ab7867d7-6a6b-476e-aadf-ecf260aa91a2","Type":"ContainerStarted","Data":"8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f"} Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.578711 4742 generic.go:334] "Generic (PLEG): container finished" podID="388e7953-5819-4978-842c-1cf54fd568c9" containerID="4277d32a24ea03efda6807e804b77cafeebe7c67b2fec2c05c23aeeef287955d" exitCode=0 Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.578787 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"388e7953-5819-4978-842c-1cf54fd568c9","Type":"ContainerDied","Data":"4277d32a24ea03efda6807e804b77cafeebe7c67b2fec2c05c23aeeef287955d"} Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.588679 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-66f7f988b5-b5pzf" event={"ID":"fa30e851-f383-42c0-9e09-d8c896ed77ad","Type":"ContainerStarted","Data":"c6a468d9df31190dc440d8ef2e011bdc021d2f9b7a5c87349652c7adab9aea44"} Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.588725 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-66f7f988b5-b5pzf" event={"ID":"fa30e851-f383-42c0-9e09-d8c896ed77ad","Type":"ContainerStarted","Data":"9859f27d8f4bb1f62c35ce8e77ebd26f8aca7e99762a5eb2b0f4ad252e8f2430"} Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.589659 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" podStartSLOduration=2.608178083 podStartE2EDuration="4.589648604s" podCreationTimestamp="2025-12-05 06:12:18 +0000 UTC" firstStartedPulling="2025-12-05 06:12:19.493216585 +0000 UTC m=+1215.405351647" lastFinishedPulling="2025-12-05 06:12:21.474687106 +0000 UTC m=+1217.386822168" observedRunningTime="2025-12-05 06:12:22.581790265 +0000 UTC m=+1218.493925337" watchObservedRunningTime="2025-12-05 06:12:22.589648604 +0000 UTC m=+1218.501783666" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.591452 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" 
event={"ID":"d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d","Type":"ContainerDied","Data":"9e0dbec906230bafc7535945a0c5e04429073020ab4f1c72eed9b1098650e4e3"} Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.591483 4742 scope.go:117] "RemoveContainer" containerID="2c94142218935753c6c9d209e2c482dac33a4509013d87810287db204bb399e6" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.591574 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-tw5cz" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.600021 4742 generic.go:334] "Generic (PLEG): container finished" podID="d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" containerID="f0c13babb3b0d83117243ce9f5757a86bcd5049dbd2b26f9e68d5e00c8341438" exitCode=0 Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.600205 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-46pxh" event={"ID":"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2","Type":"ContainerDied","Data":"f0c13babb3b0d83117243ce9f5757a86bcd5049dbd2b26f9e68d5e00c8341438"} Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.609963 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-66f7f988b5-b5pzf" podStartSLOduration=2.784967641 podStartE2EDuration="4.609943075s" podCreationTimestamp="2025-12-05 06:12:18 +0000 UTC" firstStartedPulling="2025-12-05 06:12:19.651349685 +0000 UTC m=+1215.563484747" lastFinishedPulling="2025-12-05 06:12:21.476325099 +0000 UTC m=+1217.388460181" observedRunningTime="2025-12-05 06:12:22.608605579 +0000 UTC m=+1218.520740631" watchObservedRunningTime="2025-12-05 06:12:22.609943075 +0000 UTC m=+1218.522078137" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.750454 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.810116 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-tw5cz"] Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.817013 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmp8w\" (UniqueName: \"kubernetes.io/projected/388e7953-5819-4978-842c-1cf54fd568c9-kube-api-access-lmp8w\") pod \"388e7953-5819-4978-842c-1cf54fd568c9\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.817093 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-scripts\") pod \"388e7953-5819-4978-842c-1cf54fd568c9\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.817115 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-sg-core-conf-yaml\") pod \"388e7953-5819-4978-842c-1cf54fd568c9\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.817141 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/388e7953-5819-4978-842c-1cf54fd568c9-run-httpd\") pod \"388e7953-5819-4978-842c-1cf54fd568c9\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.817198 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-config-data\") pod \"388e7953-5819-4978-842c-1cf54fd568c9\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.817219 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/388e7953-5819-4978-842c-1cf54fd568c9-log-httpd\") pod \"388e7953-5819-4978-842c-1cf54fd568c9\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.817279 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-combined-ca-bundle\") pod \"388e7953-5819-4978-842c-1cf54fd568c9\" (UID: \"388e7953-5819-4978-842c-1cf54fd568c9\") " Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.821899 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/388e7953-5819-4978-842c-1cf54fd568c9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "388e7953-5819-4978-842c-1cf54fd568c9" (UID: "388e7953-5819-4978-842c-1cf54fd568c9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.822248 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/388e7953-5819-4978-842c-1cf54fd568c9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "388e7953-5819-4978-842c-1cf54fd568c9" (UID: "388e7953-5819-4978-842c-1cf54fd568c9"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.830003 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/388e7953-5819-4978-842c-1cf54fd568c9-kube-api-access-lmp8w" (OuterVolumeSpecName: "kube-api-access-lmp8w") pod "388e7953-5819-4978-842c-1cf54fd568c9" (UID: "388e7953-5819-4978-842c-1cf54fd568c9"). InnerVolumeSpecName "kube-api-access-lmp8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.832021 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-scripts" (OuterVolumeSpecName: "scripts") pod "388e7953-5819-4978-842c-1cf54fd568c9" (UID: "388e7953-5819-4978-842c-1cf54fd568c9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.832258 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-tw5cz"] Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.876679 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "388e7953-5819-4978-842c-1cf54fd568c9" (UID: "388e7953-5819-4978-842c-1cf54fd568c9"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.919732 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmp8w\" (UniqueName: \"kubernetes.io/projected/388e7953-5819-4978-842c-1cf54fd568c9-kube-api-access-lmp8w\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.919788 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.919801 4742 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.919813 4742 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/388e7953-5819-4978-842c-1cf54fd568c9-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.919824 4742 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/388e7953-5819-4978-842c-1cf54fd568c9-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.921525 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "388e7953-5819-4978-842c-1cf54fd568c9" (UID: "388e7953-5819-4978-842c-1cf54fd568c9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:22 crc kubenswrapper[4742]: I1205 06:12:22.959932 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-config-data" (OuterVolumeSpecName: "config-data") pod "388e7953-5819-4978-842c-1cf54fd568c9" (UID: "388e7953-5819-4978-842c-1cf54fd568c9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.021966 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.022007 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/388e7953-5819-4978-842c-1cf54fd568c9-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.615808 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"388e7953-5819-4978-842c-1cf54fd568c9","Type":"ContainerDied","Data":"3ab014d5fc00da8e154c84721f06351ebc4410d32d4a0077bf705637af70942b"} Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.616163 4742 scope.go:117] "RemoveContainer" containerID="d012bb303825e51fab5876490727b6cbc42ec9194656ed2dbf2941cf5a0d71fa" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.616064 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.634327 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"acb321ed-2f39-4c68-bafd-1eabb744fee9","Type":"ContainerStarted","Data":"4ccf56ad8bfa048ecc6e15151ff71aa24d402ac41a1188da330cfb7757a8cfe6"} Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.634367 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"acb321ed-2f39-4c68-bafd-1eabb744fee9","Type":"ContainerStarted","Data":"15f60914af2112465ebf23d1b3f7799dd5741321a833d188d7b7a0ab32e96194"} Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.646221 4742 scope.go:117] "RemoveContainer" containerID="d557b005e6d1d32413fda82b0019e6afea6e855c9c127c40dff0cb31227f0df8" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.647250 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-46pxh" event={"ID":"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2","Type":"ContainerStarted","Data":"4af5cdeba3084a95fef3915a0cce0da54f6dbf9824935a0915a46081f62f7ba4"} Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.647958 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.654068 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ab7867d7-6a6b-476e-aadf-ecf260aa91a2","Type":"ContainerStarted","Data":"22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592"} Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.656695 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.660574 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/cinder-scheduler-0" podStartSLOduration=3.5720977080000003 podStartE2EDuration="4.66056089s" podCreationTimestamp="2025-12-05 06:12:19 +0000 UTC" firstStartedPulling="2025-12-05 06:12:20.668999403 +0000 UTC m=+1216.581134465" lastFinishedPulling="2025-12-05 06:12:21.757462585 +0000 UTC m=+1217.669597647" observedRunningTime="2025-12-05 06:12:23.659901813 +0000 UTC m=+1219.572036865" watchObservedRunningTime="2025-12-05 06:12:23.66056089 +0000 UTC m=+1219.572695962" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.676713 4742 scope.go:117] "RemoveContainer" containerID="4277d32a24ea03efda6807e804b77cafeebe7c67b2fec2c05c23aeeef287955d" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.684451 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6578955fd5-46pxh" podStartSLOduration=4.684434506 podStartE2EDuration="4.684434506s" podCreationTimestamp="2025-12-05 06:12:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:12:23.683976254 +0000 UTC m=+1219.596111326" watchObservedRunningTime="2025-12-05 06:12:23.684434506 +0000 UTC m=+1219.596569568" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.708759 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.708904 4742 scope.go:117] "RemoveContainer" containerID="955cfd5120b0d6e13110d4d1a290597c0926bd70e91c83a93c339f051492b6ef" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.722875 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.742027 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:12:23 crc kubenswrapper[4742]: E1205 06:12:23.742455 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="proxy-httpd" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.742474 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="proxy-httpd" Dec 05 06:12:23 crc kubenswrapper[4742]: E1205 06:12:23.742485 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="ceilometer-central-agent" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.742492 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="ceilometer-central-agent" Dec 05 06:12:23 crc kubenswrapper[4742]: E1205 06:12:23.742515 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="ceilometer-notification-agent" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.742522 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="ceilometer-notification-agent" Dec 05 06:12:23 crc kubenswrapper[4742]: E1205 06:12:23.742534 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="sg-core" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.742540 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="sg-core" Dec 05 06:12:23 crc kubenswrapper[4742]: E1205 06:12:23.742548 4742 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d" containerName="init" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.742554 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d" containerName="init" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.742727 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="ceilometer-notification-agent" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.742737 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d" containerName="init" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.742747 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="proxy-httpd" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.742767 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="sg-core" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.742780 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="388e7953-5819-4978-842c-1cf54fd568c9" containerName="ceilometer-central-agent" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.744290 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.746150 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.746468 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.763350 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.763330627 podStartE2EDuration="4.763330627s" podCreationTimestamp="2025-12-05 06:12:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:12:23.727110733 +0000 UTC m=+1219.639245825" watchObservedRunningTime="2025-12-05 06:12:23.763330627 +0000 UTC m=+1219.675465689" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.777579 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.858979 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c75d285-a5c8-46f4-9a02-46bca7a81694-run-httpd\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.859022 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.859049 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c75d285-a5c8-46f4-9a02-46bca7a81694-log-httpd\") pod \"ceilometer-0\" (UID: 
\"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.859128 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-scripts\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.859171 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-config-data\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.859213 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8ldd\" (UniqueName: \"kubernetes.io/projected/1c75d285-a5c8-46f4-9a02-46bca7a81694-kube-api-access-v8ldd\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.859286 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.967293 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c75d285-a5c8-46f4-9a02-46bca7a81694-run-httpd\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.967354 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.967791 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c75d285-a5c8-46f4-9a02-46bca7a81694-run-httpd\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.968211 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c75d285-a5c8-46f4-9a02-46bca7a81694-log-httpd\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.968439 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c75d285-a5c8-46f4-9a02-46bca7a81694-log-httpd\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.968511 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-scripts\") pod 
\"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.968548 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-config-data\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.968601 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8ldd\" (UniqueName: \"kubernetes.io/projected/1c75d285-a5c8-46f4-9a02-46bca7a81694-kube-api-access-v8ldd\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.968673 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.974364 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-config-data\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.983841 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-scripts\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.983848 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:23 crc kubenswrapper[4742]: I1205 06:12:23.984318 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:24 crc kubenswrapper[4742]: I1205 06:12:23.999342 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8ldd\" (UniqueName: \"kubernetes.io/projected/1c75d285-a5c8-46f4-9a02-46bca7a81694-kube-api-access-v8ldd\") pod \"ceilometer-0\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " pod="openstack/ceilometer-0" Dec 05 06:12:24 crc kubenswrapper[4742]: I1205 06:12:24.068536 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:12:24 crc kubenswrapper[4742]: I1205 06:12:24.393206 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="388e7953-5819-4978-842c-1cf54fd568c9" path="/var/lib/kubelet/pods/388e7953-5819-4978-842c-1cf54fd568c9/volumes" Dec 05 06:12:24 crc kubenswrapper[4742]: I1205 06:12:24.394153 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d" path="/var/lib/kubelet/pods/d0f4d3ad-2d27-4c1c-b3e5-a81395d2f73d/volumes" Dec 05 06:12:24 crc kubenswrapper[4742]: I1205 06:12:24.412328 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:12:24 crc kubenswrapper[4742]: I1205 06:12:24.669369 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c75d285-a5c8-46f4-9a02-46bca7a81694","Type":"ContainerStarted","Data":"585f9c09cfa6ed9b126910a041f6f497f567720b338d4a73224fb5f32aead6cd"} Dec 05 06:12:24 crc kubenswrapper[4742]: I1205 06:12:24.670448 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.041133 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5b594b6ccb-vbxpj"] Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.042655 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.045311 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.045777 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.055612 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5b594b6ccb-vbxpj"] Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.055677 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.197347 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-public-tls-certs\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.197630 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-internal-tls-certs\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.197669 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-combined-ca-bundle\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.197699 4742 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-config-data\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.197722 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v88cc\" (UniqueName: \"kubernetes.io/projected/e9974486-076d-4493-af32-a08eef334572-kube-api-access-v88cc\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.197751 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-config-data-custom\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.197775 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9974486-076d-4493-af32-a08eef334572-logs\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.299591 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-config-data-custom\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.299646 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9974486-076d-4493-af32-a08eef334572-logs\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.299735 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-public-tls-certs\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.299764 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-internal-tls-certs\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.299798 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-combined-ca-bundle\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.299832 4742 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-config-data\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.299861 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v88cc\" (UniqueName: \"kubernetes.io/projected/e9974486-076d-4493-af32-a08eef334572-kube-api-access-v88cc\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.300364 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9974486-076d-4493-af32-a08eef334572-logs\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.303646 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-config-data-custom\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.304162 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-combined-ca-bundle\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.304179 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-public-tls-certs\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.305567 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-config-data\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.314185 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-internal-tls-certs\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.316705 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v88cc\" (UniqueName: \"kubernetes.io/projected/e9974486-076d-4493-af32-a08eef334572-kube-api-access-v88cc\") pod \"barbican-api-5b594b6ccb-vbxpj\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.386758 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.678132 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c75d285-a5c8-46f4-9a02-46bca7a81694","Type":"ContainerStarted","Data":"01a430e7ee682bd0558d05549cc5337e93ae20e189353815e2f64ec224b76659"} Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.680279 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="ab7867d7-6a6b-476e-aadf-ecf260aa91a2" containerName="cinder-api-log" containerID="cri-o://8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f" gracePeriod=30 Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.680514 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="ab7867d7-6a6b-476e-aadf-ecf260aa91a2" containerName="cinder-api" containerID="cri-o://22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592" gracePeriod=30 Dec 05 06:12:25 crc kubenswrapper[4742]: I1205 06:12:25.881010 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5b594b6ccb-vbxpj"] Dec 05 06:12:25 crc kubenswrapper[4742]: W1205 06:12:25.891433 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9974486_076d_4493_af32_a08eef334572.slice/crio-46de7901bfdf9ac1953f8ebd96789e43e6d66b197c2a14f635ad4b7a25f3e3e8 WatchSource:0}: Error finding container 46de7901bfdf9ac1953f8ebd96789e43e6d66b197c2a14f635ad4b7a25f3e3e8: Status 404 returned error can't find the container with id 46de7901bfdf9ac1953f8ebd96789e43e6d66b197c2a14f635ad4b7a25f3e3e8 Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.110861 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.216857 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-config-data\") pod \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.216919 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c55q9\" (UniqueName: \"kubernetes.io/projected/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-kube-api-access-c55q9\") pod \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.216946 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-combined-ca-bundle\") pod \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.216966 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-logs\") pod \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.216990 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-config-data-custom\") pod \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.217604 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-etc-machine-id\") pod \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.217634 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-scripts\") pod \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\" (UID: \"ab7867d7-6a6b-476e-aadf-ecf260aa91a2\") " Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.219548 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-logs" (OuterVolumeSpecName: "logs") pod "ab7867d7-6a6b-476e-aadf-ecf260aa91a2" (UID: "ab7867d7-6a6b-476e-aadf-ecf260aa91a2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.220146 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ab7867d7-6a6b-476e-aadf-ecf260aa91a2" (UID: "ab7867d7-6a6b-476e-aadf-ecf260aa91a2"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.222171 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-scripts" (OuterVolumeSpecName: "scripts") pod "ab7867d7-6a6b-476e-aadf-ecf260aa91a2" (UID: "ab7867d7-6a6b-476e-aadf-ecf260aa91a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.224441 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-kube-api-access-c55q9" (OuterVolumeSpecName: "kube-api-access-c55q9") pod "ab7867d7-6a6b-476e-aadf-ecf260aa91a2" (UID: "ab7867d7-6a6b-476e-aadf-ecf260aa91a2"). InnerVolumeSpecName "kube-api-access-c55q9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.228475 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ab7867d7-6a6b-476e-aadf-ecf260aa91a2" (UID: "ab7867d7-6a6b-476e-aadf-ecf260aa91a2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.286339 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab7867d7-6a6b-476e-aadf-ecf260aa91a2" (UID: "ab7867d7-6a6b-476e-aadf-ecf260aa91a2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.313906 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-config-data" (OuterVolumeSpecName: "config-data") pod "ab7867d7-6a6b-476e-aadf-ecf260aa91a2" (UID: "ab7867d7-6a6b-476e-aadf-ecf260aa91a2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.320108 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.320255 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.320315 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c55q9\" (UniqueName: \"kubernetes.io/projected/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-kube-api-access-c55q9\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.320392 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.320450 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.320500 4742 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.320557 4742 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ab7867d7-6a6b-476e-aadf-ecf260aa91a2-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.693846 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c75d285-a5c8-46f4-9a02-46bca7a81694","Type":"ContainerStarted","Data":"2803d832cdfff451b43b9a925d99698f58c41069d927a523197d9ed45511172c"} Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.694287 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c75d285-a5c8-46f4-9a02-46bca7a81694","Type":"ContainerStarted","Data":"37d4718bc018b00d8f844cd6925baeee526cbc305d9522ef98c7b9906946f6f4"} Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.697788 4742 generic.go:334] "Generic (PLEG): container finished" podID="ab7867d7-6a6b-476e-aadf-ecf260aa91a2" containerID="22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592" exitCode=0 Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.697819 4742 generic.go:334] "Generic (PLEG): container finished" podID="ab7867d7-6a6b-476e-aadf-ecf260aa91a2" containerID="8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f" exitCode=143 Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.697866 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ab7867d7-6a6b-476e-aadf-ecf260aa91a2","Type":"ContainerDied","Data":"22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592"} Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.697889 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"ab7867d7-6a6b-476e-aadf-ecf260aa91a2","Type":"ContainerDied","Data":"8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f"} Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.697902 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ab7867d7-6a6b-476e-aadf-ecf260aa91a2","Type":"ContainerDied","Data":"6b6ecb4b61b8c1e971fe1341205179ff9e50424ecad8e2a72b9942e209c9ac0a"} Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.697921 4742 scope.go:117] "RemoveContainer" containerID="22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.698291 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.702267 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b594b6ccb-vbxpj" event={"ID":"e9974486-076d-4493-af32-a08eef334572","Type":"ContainerStarted","Data":"1d507c229540319f85af5a5bb49cd7bea47d3c4c4e80bec322f63230f391811c"} Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.702469 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b594b6ccb-vbxpj" event={"ID":"e9974486-076d-4493-af32-a08eef334572","Type":"ContainerStarted","Data":"c58c31d14e6bc541855c1db0c9f365ab77a7e68becb2933d6d951d6d108a2537"} Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.702537 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b594b6ccb-vbxpj" event={"ID":"e9974486-076d-4493-af32-a08eef334572","Type":"ContainerStarted","Data":"46de7901bfdf9ac1953f8ebd96789e43e6d66b197c2a14f635ad4b7a25f3e3e8"} Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.702768 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.702813 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.728751 4742 scope.go:117] "RemoveContainer" containerID="8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.731307 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5b594b6ccb-vbxpj" podStartSLOduration=1.731288975 podStartE2EDuration="1.731288975s" podCreationTimestamp="2025-12-05 06:12:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:12:26.722929603 +0000 UTC m=+1222.635064665" watchObservedRunningTime="2025-12-05 06:12:26.731288975 +0000 UTC m=+1222.643424047" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.761560 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.772790 4742 scope.go:117] "RemoveContainer" containerID="22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592" Dec 05 06:12:26 crc kubenswrapper[4742]: E1205 06:12:26.775944 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592\": container with ID starting with 22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592 not found: ID does 
not exist" containerID="22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.775981 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592"} err="failed to get container status \"22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592\": rpc error: code = NotFound desc = could not find container \"22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592\": container with ID starting with 22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592 not found: ID does not exist" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.776000 4742 scope.go:117] "RemoveContainer" containerID="8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.776266 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 05 06:12:26 crc kubenswrapper[4742]: E1205 06:12:26.776346 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f\": container with ID starting with 8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f not found: ID does not exist" containerID="8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.776394 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f"} err="failed to get container status \"8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f\": rpc error: code = NotFound desc = could not find container \"8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f\": container with ID starting with 8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f not found: ID does not exist" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.776419 4742 scope.go:117] "RemoveContainer" containerID="22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.777565 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592"} err="failed to get container status \"22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592\": rpc error: code = NotFound desc = could not find container \"22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592\": container with ID starting with 22ad0733e76c35d8f29e71e1188f46891e06d8cb26b6c1d66aeb135c6ddba592 not found: ID does not exist" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.777589 4742 scope.go:117] "RemoveContainer" containerID="8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.778413 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f"} err="failed to get container status \"8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f\": rpc error: code = NotFound desc = could not find container \"8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f\": container with ID starting with 
8b3ddbab01731f04a3cea09cb6aff25b48e893351504eaac45d2a446e72c656f not found: ID does not exist" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.797932 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 06:12:26 crc kubenswrapper[4742]: E1205 06:12:26.798427 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab7867d7-6a6b-476e-aadf-ecf260aa91a2" containerName="cinder-api-log" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.798439 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab7867d7-6a6b-476e-aadf-ecf260aa91a2" containerName="cinder-api-log" Dec 05 06:12:26 crc kubenswrapper[4742]: E1205 06:12:26.798448 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab7867d7-6a6b-476e-aadf-ecf260aa91a2" containerName="cinder-api" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.798454 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab7867d7-6a6b-476e-aadf-ecf260aa91a2" containerName="cinder-api" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.798614 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab7867d7-6a6b-476e-aadf-ecf260aa91a2" containerName="cinder-api-log" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.798630 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab7867d7-6a6b-476e-aadf-ecf260aa91a2" containerName="cinder-api" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.799561 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.801532 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.801912 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.802725 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.817631 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.932757 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-config-data-custom\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.932810 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl9lc\" (UniqueName: \"kubernetes.io/projected/e6063f78-1b45-493e-ae25-62239a1ed5e3-kube-api-access-hl9lc\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.932826 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.932868 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e6063f78-1b45-493e-ae25-62239a1ed5e3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.932883 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-config-data\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.932949 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.933023 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.933045 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-scripts\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:26 crc kubenswrapper[4742]: I1205 06:12:26.933094 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6063f78-1b45-493e-ae25-62239a1ed5e3-logs\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.034755 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.034831 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.034854 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-scripts\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.034888 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6063f78-1b45-493e-ae25-62239a1ed5e3-logs\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.034926 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-config-data-custom\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.034950 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl9lc\" (UniqueName: \"kubernetes.io/projected/e6063f78-1b45-493e-ae25-62239a1ed5e3-kube-api-access-hl9lc\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.034965 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.034997 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e6063f78-1b45-493e-ae25-62239a1ed5e3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.035012 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-config-data\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.036345 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6063f78-1b45-493e-ae25-62239a1ed5e3-logs\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.041416 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-config-data\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.041698 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e6063f78-1b45-493e-ae25-62239a1ed5e3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.047112 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.047249 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-config-data-custom\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.047441 4742 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.052295 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.056706 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-scripts\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.057238 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl9lc\" (UniqueName: \"kubernetes.io/projected/e6063f78-1b45-493e-ae25-62239a1ed5e3-kube-api-access-hl9lc\") pod \"cinder-api-0\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.127308 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.567439 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 06:12:27 crc kubenswrapper[4742]: I1205 06:12:27.728991 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e6063f78-1b45-493e-ae25-62239a1ed5e3","Type":"ContainerStarted","Data":"e90b9897da1b55b6f2e78939d7972079d11bcc6b94d139a1ddde93d459b419a6"} Dec 05 06:12:28 crc kubenswrapper[4742]: I1205 06:12:28.399013 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab7867d7-6a6b-476e-aadf-ecf260aa91a2" path="/var/lib/kubelet/pods/ab7867d7-6a6b-476e-aadf-ecf260aa91a2/volumes" Dec 05 06:12:28 crc kubenswrapper[4742]: I1205 06:12:28.742195 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c75d285-a5c8-46f4-9a02-46bca7a81694","Type":"ContainerStarted","Data":"e0c246cda86a7ce8a124ffb6eef34b61f5249c7fea70d6d230faadeabcf73213"} Dec 05 06:12:28 crc kubenswrapper[4742]: I1205 06:12:28.742747 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 06:12:28 crc kubenswrapper[4742]: I1205 06:12:28.745865 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e6063f78-1b45-493e-ae25-62239a1ed5e3","Type":"ContainerStarted","Data":"873f59d3fccd6e3aebb3bd5b7bfff039e46a5ac7aa542e462b2ded2d505ccf92"} Dec 05 06:12:28 crc kubenswrapper[4742]: I1205 06:12:28.767790 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.473231097 podStartE2EDuration="5.767768451s" podCreationTimestamp="2025-12-05 06:12:23 +0000 UTC" firstStartedPulling="2025-12-05 06:12:24.425122289 +0000 UTC m=+1220.337257351" lastFinishedPulling="2025-12-05 06:12:27.719659643 +0000 UTC m=+1223.631794705" observedRunningTime="2025-12-05 06:12:28.760639831 +0000 UTC m=+1224.672774913" watchObservedRunningTime="2025-12-05 06:12:28.767768451 +0000 UTC m=+1224.679903513" Dec 05 06:12:29 crc kubenswrapper[4742]: 
I1205 06:12:29.757179 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e6063f78-1b45-493e-ae25-62239a1ed5e3","Type":"ContainerStarted","Data":"9ddfbe5ffee29c713d306ff006d773b5b100e240b7d408ad28e4d4bab8088896"} Dec 05 06:12:29 crc kubenswrapper[4742]: I1205 06:12:29.757906 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 06:12:29 crc kubenswrapper[4742]: I1205 06:12:29.793293 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.793272627 podStartE2EDuration="3.793272627s" podCreationTimestamp="2025-12-05 06:12:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:12:29.774918039 +0000 UTC m=+1225.687053131" watchObservedRunningTime="2025-12-05 06:12:29.793272627 +0000 UTC m=+1225.705407699" Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.042509 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.133993 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.150123 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.151126 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.270366 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-m9skl"] Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.270619 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b7b667979-m9skl" podUID="8620635a-6cc9-4c28-9a23-46017882bcb2" containerName="dnsmasq-dns" containerID="cri-o://d4b4d53a61d568d27cd31811531077ba4d61d36b10ce070641d0523f22a892e8" gracePeriod=10 Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.409041 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.469027 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.766305 4742 generic.go:334] "Generic (PLEG): container finished" podID="8620635a-6cc9-4c28-9a23-46017882bcb2" containerID="d4b4d53a61d568d27cd31811531077ba4d61d36b10ce070641d0523f22a892e8" exitCode=0 Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.766481 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-m9skl" event={"ID":"8620635a-6cc9-4c28-9a23-46017882bcb2","Type":"ContainerDied","Data":"d4b4d53a61d568d27cd31811531077ba4d61d36b10ce070641d0523f22a892e8"} Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.766511 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-m9skl" event={"ID":"8620635a-6cc9-4c28-9a23-46017882bcb2","Type":"ContainerDied","Data":"ebb7e4671e61f51a4f664e48658a0b047544c021ef12eb35cbe367a74d9bef05"} Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.766526 4742 pod_container_deletor.go:80] "Container not found in pod's 
containers" containerID="ebb7e4671e61f51a4f664e48658a0b047544c021ef12eb35cbe367a74d9bef05" Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.767719 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="acb321ed-2f39-4c68-bafd-1eabb744fee9" containerName="cinder-scheduler" containerID="cri-o://15f60914af2112465ebf23d1b3f7799dd5741321a833d188d7b7a0ab32e96194" gracePeriod=30 Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.767788 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="acb321ed-2f39-4c68-bafd-1eabb744fee9" containerName="probe" containerID="cri-o://4ccf56ad8bfa048ecc6e15151ff71aa24d402ac41a1188da330cfb7757a8cfe6" gracePeriod=30 Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.797389 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.928854 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-dns-svc\") pod \"8620635a-6cc9-4c28-9a23-46017882bcb2\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.928907 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-dns-swift-storage-0\") pod \"8620635a-6cc9-4c28-9a23-46017882bcb2\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.929018 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-ovsdbserver-nb\") pod \"8620635a-6cc9-4c28-9a23-46017882bcb2\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.929044 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzwn6\" (UniqueName: \"kubernetes.io/projected/8620635a-6cc9-4c28-9a23-46017882bcb2-kube-api-access-dzwn6\") pod \"8620635a-6cc9-4c28-9a23-46017882bcb2\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.929084 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-config\") pod \"8620635a-6cc9-4c28-9a23-46017882bcb2\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.929229 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-ovsdbserver-sb\") pod \"8620635a-6cc9-4c28-9a23-46017882bcb2\" (UID: \"8620635a-6cc9-4c28-9a23-46017882bcb2\") " Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.948610 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8620635a-6cc9-4c28-9a23-46017882bcb2-kube-api-access-dzwn6" (OuterVolumeSpecName: "kube-api-access-dzwn6") pod "8620635a-6cc9-4c28-9a23-46017882bcb2" (UID: "8620635a-6cc9-4c28-9a23-46017882bcb2"). InnerVolumeSpecName "kube-api-access-dzwn6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.991680 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8620635a-6cc9-4c28-9a23-46017882bcb2" (UID: "8620635a-6cc9-4c28-9a23-46017882bcb2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:30 crc kubenswrapper[4742]: I1205 06:12:30.998508 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8620635a-6cc9-4c28-9a23-46017882bcb2" (UID: "8620635a-6cc9-4c28-9a23-46017882bcb2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:31 crc kubenswrapper[4742]: I1205 06:12:31.004795 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-config" (OuterVolumeSpecName: "config") pod "8620635a-6cc9-4c28-9a23-46017882bcb2" (UID: "8620635a-6cc9-4c28-9a23-46017882bcb2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:31 crc kubenswrapper[4742]: I1205 06:12:31.007186 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8620635a-6cc9-4c28-9a23-46017882bcb2" (UID: "8620635a-6cc9-4c28-9a23-46017882bcb2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:31 crc kubenswrapper[4742]: I1205 06:12:31.012584 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8620635a-6cc9-4c28-9a23-46017882bcb2" (UID: "8620635a-6cc9-4c28-9a23-46017882bcb2"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:12:31 crc kubenswrapper[4742]: I1205 06:12:31.031367 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:31 crc kubenswrapper[4742]: I1205 06:12:31.031413 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:31 crc kubenswrapper[4742]: I1205 06:12:31.031426 4742 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:31 crc kubenswrapper[4742]: I1205 06:12:31.031435 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:31 crc kubenswrapper[4742]: I1205 06:12:31.031443 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzwn6\" (UniqueName: \"kubernetes.io/projected/8620635a-6cc9-4c28-9a23-46017882bcb2-kube-api-access-dzwn6\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:31 crc kubenswrapper[4742]: I1205 06:12:31.031453 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8620635a-6cc9-4c28-9a23-46017882bcb2-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:31 crc kubenswrapper[4742]: I1205 06:12:31.775753 4742 generic.go:334] "Generic (PLEG): container finished" podID="acb321ed-2f39-4c68-bafd-1eabb744fee9" containerID="4ccf56ad8bfa048ecc6e15151ff71aa24d402ac41a1188da330cfb7757a8cfe6" exitCode=0 Dec 05 06:12:31 crc kubenswrapper[4742]: I1205 06:12:31.775828 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"acb321ed-2f39-4c68-bafd-1eabb744fee9","Type":"ContainerDied","Data":"4ccf56ad8bfa048ecc6e15151ff71aa24d402ac41a1188da330cfb7757a8cfe6"} Dec 05 06:12:31 crc kubenswrapper[4742]: I1205 06:12:31.775855 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-m9skl" Dec 05 06:12:31 crc kubenswrapper[4742]: I1205 06:12:31.810556 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-m9skl"] Dec 05 06:12:31 crc kubenswrapper[4742]: I1205 06:12:31.821669 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-m9skl"] Dec 05 06:12:32 crc kubenswrapper[4742]: I1205 06:12:32.401270 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8620635a-6cc9-4c28-9a23-46017882bcb2" path="/var/lib/kubelet/pods/8620635a-6cc9-4c28-9a23-46017882bcb2/volumes" Dec 05 06:12:32 crc kubenswrapper[4742]: I1205 06:12:32.511471 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:12:32 crc kubenswrapper[4742]: I1205 06:12:32.615390 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7dcd478554-rrcm8"] Dec 05 06:12:32 crc kubenswrapper[4742]: I1205 06:12:32.615765 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7dcd478554-rrcm8" podUID="a1f68ebd-adfb-406e-ac16-d14599ea9bc3" containerName="neutron-api" containerID="cri-o://60e83da0e60acdbe1f32ff6eba4e9b83b35dd5e1aefbfa932805f3c182231fc8" gracePeriod=30 Dec 05 06:12:32 crc kubenswrapper[4742]: I1205 06:12:32.615876 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7dcd478554-rrcm8" podUID="a1f68ebd-adfb-406e-ac16-d14599ea9bc3" containerName="neutron-httpd" containerID="cri-o://6ecbe3675537cc34ac2ba78f89c09921c117948ce41e6442e7c86331efe37a8e" gracePeriod=30 Dec 05 06:12:33 crc kubenswrapper[4742]: E1205 06:12:33.557717 4742 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacb321ed_2f39_4c68_bafd_1eabb744fee9.slice/crio-conmon-15f60914af2112465ebf23d1b3f7799dd5741321a833d188d7b7a0ab32e96194.scope\": RecentStats: unable to find data in memory cache]" Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.797379 4742 generic.go:334] "Generic (PLEG): container finished" podID="a1f68ebd-adfb-406e-ac16-d14599ea9bc3" containerID="6ecbe3675537cc34ac2ba78f89c09921c117948ce41e6442e7c86331efe37a8e" exitCode=0 Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.797464 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7dcd478554-rrcm8" event={"ID":"a1f68ebd-adfb-406e-ac16-d14599ea9bc3","Type":"ContainerDied","Data":"6ecbe3675537cc34ac2ba78f89c09921c117948ce41e6442e7c86331efe37a8e"} Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.801029 4742 generic.go:334] "Generic (PLEG): container finished" podID="acb321ed-2f39-4c68-bafd-1eabb744fee9" containerID="15f60914af2112465ebf23d1b3f7799dd5741321a833d188d7b7a0ab32e96194" exitCode=0 Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.801082 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"acb321ed-2f39-4c68-bafd-1eabb744fee9","Type":"ContainerDied","Data":"15f60914af2112465ebf23d1b3f7799dd5741321a833d188d7b7a0ab32e96194"} Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.801107 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"acb321ed-2f39-4c68-bafd-1eabb744fee9","Type":"ContainerDied","Data":"e38bd9817dfd597dfd55a46701fe70ba64789aa4ee3f505015bfea857a70a312"} Dec 05 
06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.801121 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e38bd9817dfd597dfd55a46701fe70ba64789aa4ee3f505015bfea857a70a312" Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.850680 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.879639 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-config-data-custom\") pod \"acb321ed-2f39-4c68-bafd-1eabb744fee9\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.879868 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/acb321ed-2f39-4c68-bafd-1eabb744fee9-etc-machine-id\") pod \"acb321ed-2f39-4c68-bafd-1eabb744fee9\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.879995 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-combined-ca-bundle\") pod \"acb321ed-2f39-4c68-bafd-1eabb744fee9\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.880046 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8xf9\" (UniqueName: \"kubernetes.io/projected/acb321ed-2f39-4c68-bafd-1eabb744fee9-kube-api-access-g8xf9\") pod \"acb321ed-2f39-4c68-bafd-1eabb744fee9\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.880113 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-scripts\") pod \"acb321ed-2f39-4c68-bafd-1eabb744fee9\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.880167 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-config-data\") pod \"acb321ed-2f39-4c68-bafd-1eabb744fee9\" (UID: \"acb321ed-2f39-4c68-bafd-1eabb744fee9\") " Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.881137 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/acb321ed-2f39-4c68-bafd-1eabb744fee9-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "acb321ed-2f39-4c68-bafd-1eabb744fee9" (UID: "acb321ed-2f39-4c68-bafd-1eabb744fee9"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.881462 4742 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/acb321ed-2f39-4c68-bafd-1eabb744fee9-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.919037 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-scripts" (OuterVolumeSpecName: "scripts") pod "acb321ed-2f39-4c68-bafd-1eabb744fee9" (UID: "acb321ed-2f39-4c68-bafd-1eabb744fee9"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.923300 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "acb321ed-2f39-4c68-bafd-1eabb744fee9" (UID: "acb321ed-2f39-4c68-bafd-1eabb744fee9"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.926519 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acb321ed-2f39-4c68-bafd-1eabb744fee9-kube-api-access-g8xf9" (OuterVolumeSpecName: "kube-api-access-g8xf9") pod "acb321ed-2f39-4c68-bafd-1eabb744fee9" (UID: "acb321ed-2f39-4c68-bafd-1eabb744fee9"). InnerVolumeSpecName "kube-api-access-g8xf9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.983271 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8xf9\" (UniqueName: \"kubernetes.io/projected/acb321ed-2f39-4c68-bafd-1eabb744fee9-kube-api-access-g8xf9\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.983596 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.983608 4742 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:33 crc kubenswrapper[4742]: I1205 06:12:33.989148 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "acb321ed-2f39-4c68-bafd-1eabb744fee9" (UID: "acb321ed-2f39-4c68-bafd-1eabb744fee9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.053205 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-config-data" (OuterVolumeSpecName: "config-data") pod "acb321ed-2f39-4c68-bafd-1eabb744fee9" (UID: "acb321ed-2f39-4c68-bafd-1eabb744fee9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.085201 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.085235 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb321ed-2f39-4c68-bafd-1eabb744fee9-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.666533 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.693576 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.810645 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.837501 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.851284 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.876466 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 06:12:34 crc kubenswrapper[4742]: E1205 06:12:34.876837 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8620635a-6cc9-4c28-9a23-46017882bcb2" containerName="dnsmasq-dns" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.876856 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="8620635a-6cc9-4c28-9a23-46017882bcb2" containerName="dnsmasq-dns" Dec 05 06:12:34 crc kubenswrapper[4742]: E1205 06:12:34.876869 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acb321ed-2f39-4c68-bafd-1eabb744fee9" containerName="cinder-scheduler" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.876875 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="acb321ed-2f39-4c68-bafd-1eabb744fee9" containerName="cinder-scheduler" Dec 05 06:12:34 crc kubenswrapper[4742]: E1205 06:12:34.876896 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acb321ed-2f39-4c68-bafd-1eabb744fee9" containerName="probe" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.876933 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="acb321ed-2f39-4c68-bafd-1eabb744fee9" containerName="probe" Dec 05 06:12:34 crc kubenswrapper[4742]: E1205 06:12:34.876960 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8620635a-6cc9-4c28-9a23-46017882bcb2" containerName="init" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.876966 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="8620635a-6cc9-4c28-9a23-46017882bcb2" containerName="init" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.877244 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="acb321ed-2f39-4c68-bafd-1eabb744fee9" containerName="cinder-scheduler" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.877264 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="acb321ed-2f39-4c68-bafd-1eabb744fee9" containerName="probe" Dec 05 06:12:34 crc 
kubenswrapper[4742]: I1205 06:12:34.877282 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="8620635a-6cc9-4c28-9a23-46017882bcb2" containerName="dnsmasq-dns" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.878534 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.881971 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.897076 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.999329 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-scripts\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.999373 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b535626-d96c-4843-bc25-c4fafa967b23-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.999401 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.999427 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pgq5\" (UniqueName: \"kubernetes.io/projected/3b535626-d96c-4843-bc25-c4fafa967b23-kube-api-access-5pgq5\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.999475 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-config-data\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:34 crc kubenswrapper[4742]: I1205 06:12:34.999542 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.101715 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-config-data\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.101885 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.101952 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-scripts\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.102004 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b535626-d96c-4843-bc25-c4fafa967b23-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.102042 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.102105 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pgq5\" (UniqueName: \"kubernetes.io/projected/3b535626-d96c-4843-bc25-c4fafa967b23-kube-api-access-5pgq5\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.103783 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b535626-d96c-4843-bc25-c4fafa967b23-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.114880 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.117306 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-config-data\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.118138 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.128822 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-scripts\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.136656 4742 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pgq5\" (UniqueName: \"kubernetes.io/projected/3b535626-d96c-4843-bc25-c4fafa967b23-kube-api-access-5pgq5\") pod \"cinder-scheduler-0\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") " pod="openstack/cinder-scheduler-0" Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.209075 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.718250 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 06:12:35 crc kubenswrapper[4742]: I1205 06:12:35.831442 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3b535626-d96c-4843-bc25-c4fafa967b23","Type":"ContainerStarted","Data":"9deb350faf5db1ec38aca15ec8bff9a43fe2fb9c0b6da1c7036ec9571f4e0bd0"} Dec 05 06:12:36 crc kubenswrapper[4742]: I1205 06:12:36.404578 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acb321ed-2f39-4c68-bafd-1eabb744fee9" path="/var/lib/kubelet/pods/acb321ed-2f39-4c68-bafd-1eabb744fee9/volumes" Dec 05 06:12:36 crc kubenswrapper[4742]: I1205 06:12:36.772143 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:36 crc kubenswrapper[4742]: I1205 06:12:36.842577 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3b535626-d96c-4843-bc25-c4fafa967b23","Type":"ContainerStarted","Data":"6ee0e1f6ed8fc4033483315f49001dac70cdc9d56d231f0ed6e4bf14ed5391bf"} Dec 05 06:12:36 crc kubenswrapper[4742]: I1205 06:12:36.964332 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:12:37 crc kubenswrapper[4742]: I1205 06:12:37.033661 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6bd586bb98-kn6cb"] Dec 05 06:12:37 crc kubenswrapper[4742]: I1205 06:12:37.039703 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6bd586bb98-kn6cb" podUID="e1170877-6a94-478f-b6de-75fc8dd2c13e" containerName="barbican-api-log" containerID="cri-o://8682ebf1f92c0afc545cd78f7740e9bdde156899892d295e085f0160519a75a6" gracePeriod=30 Dec 05 06:12:37 crc kubenswrapper[4742]: I1205 06:12:37.040614 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-6bd586bb98-kn6cb" podUID="e1170877-6a94-478f-b6de-75fc8dd2c13e" containerName="barbican-api" containerID="cri-o://a014a2ad7cc327d9d56678a678354339e1837cc39922551432492a1b6675b135" gracePeriod=30 Dec 05 06:12:37 crc kubenswrapper[4742]: I1205 06:12:37.851766 4742 generic.go:334] "Generic (PLEG): container finished" podID="e1170877-6a94-478f-b6de-75fc8dd2c13e" containerID="8682ebf1f92c0afc545cd78f7740e9bdde156899892d295e085f0160519a75a6" exitCode=143 Dec 05 06:12:37 crc kubenswrapper[4742]: I1205 06:12:37.851859 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6bd586bb98-kn6cb" event={"ID":"e1170877-6a94-478f-b6de-75fc8dd2c13e","Type":"ContainerDied","Data":"8682ebf1f92c0afc545cd78f7740e9bdde156899892d295e085f0160519a75a6"} Dec 05 06:12:37 crc kubenswrapper[4742]: I1205 06:12:37.853758 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"3b535626-d96c-4843-bc25-c4fafa967b23","Type":"ContainerStarted","Data":"a6798471637e201a3f0d2d87ce22e1f621bb66ed7382d07497c5dc7f71a7d869"} Dec 05 06:12:37 crc kubenswrapper[4742]: I1205 06:12:37.875228 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.875207847 podStartE2EDuration="3.875207847s" podCreationTimestamp="2025-12-05 06:12:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:12:37.868953511 +0000 UTC m=+1233.781088583" watchObservedRunningTime="2025-12-05 06:12:37.875207847 +0000 UTC m=+1233.787342929" Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.808118 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.869262 4742 generic.go:334] "Generic (PLEG): container finished" podID="a1f68ebd-adfb-406e-ac16-d14599ea9bc3" containerID="60e83da0e60acdbe1f32ff6eba4e9b83b35dd5e1aefbfa932805f3c182231fc8" exitCode=0 Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.869319 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7dcd478554-rrcm8" event={"ID":"a1f68ebd-adfb-406e-ac16-d14599ea9bc3","Type":"ContainerDied","Data":"60e83da0e60acdbe1f32ff6eba4e9b83b35dd5e1aefbfa932805f3c182231fc8"} Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.869390 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7dcd478554-rrcm8" event={"ID":"a1f68ebd-adfb-406e-ac16-d14599ea9bc3","Type":"ContainerDied","Data":"193b4471666416cb77c41f8fc7cc0a723745a171e3bb205aed8ec6e4b68569c2"} Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.869408 4742 scope.go:117] "RemoveContainer" containerID="6ecbe3675537cc34ac2ba78f89c09921c117948ce41e6442e7c86331efe37a8e" Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.869446 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7dcd478554-rrcm8" Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.884348 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-httpd-config\") pod \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.884578 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-ovndb-tls-certs\") pod \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.884629 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfs88\" (UniqueName: \"kubernetes.io/projected/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-kube-api-access-hfs88\") pod \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.884767 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-combined-ca-bundle\") pod \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.884828 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-config\") pod \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\" (UID: \"a1f68ebd-adfb-406e-ac16-d14599ea9bc3\") " Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.908207 4742 scope.go:117] "RemoveContainer" containerID="60e83da0e60acdbe1f32ff6eba4e9b83b35dd5e1aefbfa932805f3c182231fc8" Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.908401 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-kube-api-access-hfs88" (OuterVolumeSpecName: "kube-api-access-hfs88") pod "a1f68ebd-adfb-406e-ac16-d14599ea9bc3" (UID: "a1f68ebd-adfb-406e-ac16-d14599ea9bc3"). InnerVolumeSpecName "kube-api-access-hfs88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.922303 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "a1f68ebd-adfb-406e-ac16-d14599ea9bc3" (UID: "a1f68ebd-adfb-406e-ac16-d14599ea9bc3"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.987288 4742 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:38 crc kubenswrapper[4742]: I1205 06:12:38.987507 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfs88\" (UniqueName: \"kubernetes.io/projected/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-kube-api-access-hfs88\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:39 crc kubenswrapper[4742]: I1205 06:12:39.034189 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "a1f68ebd-adfb-406e-ac16-d14599ea9bc3" (UID: "a1f68ebd-adfb-406e-ac16-d14599ea9bc3"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:39 crc kubenswrapper[4742]: I1205 06:12:39.034851 4742 scope.go:117] "RemoveContainer" containerID="6ecbe3675537cc34ac2ba78f89c09921c117948ce41e6442e7c86331efe37a8e" Dec 05 06:12:39 crc kubenswrapper[4742]: E1205 06:12:39.035354 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ecbe3675537cc34ac2ba78f89c09921c117948ce41e6442e7c86331efe37a8e\": container with ID starting with 6ecbe3675537cc34ac2ba78f89c09921c117948ce41e6442e7c86331efe37a8e not found: ID does not exist" containerID="6ecbe3675537cc34ac2ba78f89c09921c117948ce41e6442e7c86331efe37a8e" Dec 05 06:12:39 crc kubenswrapper[4742]: I1205 06:12:39.035451 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ecbe3675537cc34ac2ba78f89c09921c117948ce41e6442e7c86331efe37a8e"} err="failed to get container status \"6ecbe3675537cc34ac2ba78f89c09921c117948ce41e6442e7c86331efe37a8e\": rpc error: code = NotFound desc = could not find container \"6ecbe3675537cc34ac2ba78f89c09921c117948ce41e6442e7c86331efe37a8e\": container with ID starting with 6ecbe3675537cc34ac2ba78f89c09921c117948ce41e6442e7c86331efe37a8e not found: ID does not exist" Dec 05 06:12:39 crc kubenswrapper[4742]: I1205 06:12:39.035531 4742 scope.go:117] "RemoveContainer" containerID="60e83da0e60acdbe1f32ff6eba4e9b83b35dd5e1aefbfa932805f3c182231fc8" Dec 05 06:12:39 crc kubenswrapper[4742]: E1205 06:12:39.035746 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60e83da0e60acdbe1f32ff6eba4e9b83b35dd5e1aefbfa932805f3c182231fc8\": container with ID starting with 60e83da0e60acdbe1f32ff6eba4e9b83b35dd5e1aefbfa932805f3c182231fc8 not found: ID does not exist" containerID="60e83da0e60acdbe1f32ff6eba4e9b83b35dd5e1aefbfa932805f3c182231fc8" Dec 05 06:12:39 crc kubenswrapper[4742]: I1205 06:12:39.035827 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60e83da0e60acdbe1f32ff6eba4e9b83b35dd5e1aefbfa932805f3c182231fc8"} err="failed to get container status \"60e83da0e60acdbe1f32ff6eba4e9b83b35dd5e1aefbfa932805f3c182231fc8\": rpc error: code = NotFound desc = could not find container \"60e83da0e60acdbe1f32ff6eba4e9b83b35dd5e1aefbfa932805f3c182231fc8\": container with ID starting with 60e83da0e60acdbe1f32ff6eba4e9b83b35dd5e1aefbfa932805f3c182231fc8 not found: ID does not exist" Dec 05 06:12:39 crc kubenswrapper[4742]: 
I1205 06:12:39.051238 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1f68ebd-adfb-406e-ac16-d14599ea9bc3" (UID: "a1f68ebd-adfb-406e-ac16-d14599ea9bc3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:39 crc kubenswrapper[4742]: I1205 06:12:39.066178 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-config" (OuterVolumeSpecName: "config") pod "a1f68ebd-adfb-406e-ac16-d14599ea9bc3" (UID: "a1f68ebd-adfb-406e-ac16-d14599ea9bc3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:39 crc kubenswrapper[4742]: I1205 06:12:39.088843 4742 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:39 crc kubenswrapper[4742]: I1205 06:12:39.088876 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:39 crc kubenswrapper[4742]: I1205 06:12:39.088887 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a1f68ebd-adfb-406e-ac16-d14599ea9bc3-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:39 crc kubenswrapper[4742]: I1205 06:12:39.202806 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7dcd478554-rrcm8"] Dec 05 06:12:39 crc kubenswrapper[4742]: I1205 06:12:39.210085 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7dcd478554-rrcm8"] Dec 05 06:12:39 crc kubenswrapper[4742]: I1205 06:12:39.298438 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.211512 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.221380 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6bd586bb98-kn6cb" podUID="e1170877-6a94-478f-b6de-75fc8dd2c13e" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.157:9311/healthcheck\": read tcp 10.217.0.2:49604->10.217.0.157:9311: read: connection reset by peer" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.221844 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-6bd586bb98-kn6cb" podUID="e1170877-6a94-478f-b6de-75fc8dd2c13e" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.157:9311/healthcheck\": read tcp 10.217.0.2:49594->10.217.0.157:9311: read: connection reset by peer" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.396174 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1f68ebd-adfb-406e-ac16-d14599ea9bc3" path="/var/lib/kubelet/pods/a1f68ebd-adfb-406e-ac16-d14599ea9bc3/volumes" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.627768 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.720293 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-config-data-custom\") pod \"e1170877-6a94-478f-b6de-75fc8dd2c13e\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.720368 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1170877-6a94-478f-b6de-75fc8dd2c13e-logs\") pod \"e1170877-6a94-478f-b6de-75fc8dd2c13e\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.720394 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-config-data\") pod \"e1170877-6a94-478f-b6de-75fc8dd2c13e\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.720445 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85qqq\" (UniqueName: \"kubernetes.io/projected/e1170877-6a94-478f-b6de-75fc8dd2c13e-kube-api-access-85qqq\") pod \"e1170877-6a94-478f-b6de-75fc8dd2c13e\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.720523 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-combined-ca-bundle\") pod \"e1170877-6a94-478f-b6de-75fc8dd2c13e\" (UID: \"e1170877-6a94-478f-b6de-75fc8dd2c13e\") " Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.721319 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1170877-6a94-478f-b6de-75fc8dd2c13e-logs" (OuterVolumeSpecName: "logs") pod "e1170877-6a94-478f-b6de-75fc8dd2c13e" (UID: "e1170877-6a94-478f-b6de-75fc8dd2c13e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.727282 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1170877-6a94-478f-b6de-75fc8dd2c13e-kube-api-access-85qqq" (OuterVolumeSpecName: "kube-api-access-85qqq") pod "e1170877-6a94-478f-b6de-75fc8dd2c13e" (UID: "e1170877-6a94-478f-b6de-75fc8dd2c13e"). InnerVolumeSpecName "kube-api-access-85qqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.727393 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e1170877-6a94-478f-b6de-75fc8dd2c13e" (UID: "e1170877-6a94-478f-b6de-75fc8dd2c13e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.747241 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1170877-6a94-478f-b6de-75fc8dd2c13e" (UID: "e1170877-6a94-478f-b6de-75fc8dd2c13e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.780983 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-config-data" (OuterVolumeSpecName: "config-data") pod "e1170877-6a94-478f-b6de-75fc8dd2c13e" (UID: "e1170877-6a94-478f-b6de-75fc8dd2c13e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.822755 4742 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.822797 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1170877-6a94-478f-b6de-75fc8dd2c13e-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.822809 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.822818 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85qqq\" (UniqueName: \"kubernetes.io/projected/e1170877-6a94-478f-b6de-75fc8dd2c13e-kube-api-access-85qqq\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.822828 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1170877-6a94-478f-b6de-75fc8dd2c13e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.914130 4742 generic.go:334] "Generic (PLEG): container finished" podID="e1170877-6a94-478f-b6de-75fc8dd2c13e" containerID="a014a2ad7cc327d9d56678a678354339e1837cc39922551432492a1b6675b135" exitCode=0 Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.914210 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6bd586bb98-kn6cb" event={"ID":"e1170877-6a94-478f-b6de-75fc8dd2c13e","Type":"ContainerDied","Data":"a014a2ad7cc327d9d56678a678354339e1837cc39922551432492a1b6675b135"} Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.914229 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6bd586bb98-kn6cb" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.914271 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6bd586bb98-kn6cb" event={"ID":"e1170877-6a94-478f-b6de-75fc8dd2c13e","Type":"ContainerDied","Data":"3901ba2ce30d243e43fcc74f982ad55c93401d8ddeb92a456fb9865c26af1f5a"} Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.914309 4742 scope.go:117] "RemoveContainer" containerID="a014a2ad7cc327d9d56678a678354339e1837cc39922551432492a1b6675b135" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.944652 4742 scope.go:117] "RemoveContainer" containerID="8682ebf1f92c0afc545cd78f7740e9bdde156899892d295e085f0160519a75a6" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.962458 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-6bd586bb98-kn6cb"] Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.968914 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-6bd586bb98-kn6cb"] Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.976286 4742 scope.go:117] "RemoveContainer" containerID="a014a2ad7cc327d9d56678a678354339e1837cc39922551432492a1b6675b135" Dec 05 06:12:40 crc kubenswrapper[4742]: E1205 06:12:40.981409 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a014a2ad7cc327d9d56678a678354339e1837cc39922551432492a1b6675b135\": container with ID starting with a014a2ad7cc327d9d56678a678354339e1837cc39922551432492a1b6675b135 not found: ID does not exist" containerID="a014a2ad7cc327d9d56678a678354339e1837cc39922551432492a1b6675b135" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.981445 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a014a2ad7cc327d9d56678a678354339e1837cc39922551432492a1b6675b135"} err="failed to get container status \"a014a2ad7cc327d9d56678a678354339e1837cc39922551432492a1b6675b135\": rpc error: code = NotFound desc = could not find container \"a014a2ad7cc327d9d56678a678354339e1837cc39922551432492a1b6675b135\": container with ID starting with a014a2ad7cc327d9d56678a678354339e1837cc39922551432492a1b6675b135 not found: ID does not exist" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.981467 4742 scope.go:117] "RemoveContainer" containerID="8682ebf1f92c0afc545cd78f7740e9bdde156899892d295e085f0160519a75a6" Dec 05 06:12:40 crc kubenswrapper[4742]: E1205 06:12:40.988961 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8682ebf1f92c0afc545cd78f7740e9bdde156899892d295e085f0160519a75a6\": container with ID starting with 8682ebf1f92c0afc545cd78f7740e9bdde156899892d295e085f0160519a75a6 not found: ID does not exist" containerID="8682ebf1f92c0afc545cd78f7740e9bdde156899892d295e085f0160519a75a6" Dec 05 06:12:40 crc kubenswrapper[4742]: I1205 06:12:40.989191 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8682ebf1f92c0afc545cd78f7740e9bdde156899892d295e085f0160519a75a6"} err="failed to get container status \"8682ebf1f92c0afc545cd78f7740e9bdde156899892d295e085f0160519a75a6\": rpc error: code = NotFound desc = could not find container \"8682ebf1f92c0afc545cd78f7740e9bdde156899892d295e085f0160519a75a6\": container with ID starting with 8682ebf1f92c0afc545cd78f7740e9bdde156899892d295e085f0160519a75a6 not found: ID does not exist" Dec 05 
06:12:41 crc kubenswrapper[4742]: I1205 06:12:41.524765 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:12:42 crc kubenswrapper[4742]: I1205 06:12:42.396098 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1170877-6a94-478f-b6de-75fc8dd2c13e" path="/var/lib/kubelet/pods/e1170877-6a94-478f-b6de-75fc8dd2c13e/volumes" Dec 05 06:12:45 crc kubenswrapper[4742]: I1205 06:12:45.455915 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.233007 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 06:12:46 crc kubenswrapper[4742]: E1205 06:12:46.233353 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1170877-6a94-478f-b6de-75fc8dd2c13e" containerName="barbican-api" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.233368 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1170877-6a94-478f-b6de-75fc8dd2c13e" containerName="barbican-api" Dec 05 06:12:46 crc kubenswrapper[4742]: E1205 06:12:46.233390 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1f68ebd-adfb-406e-ac16-d14599ea9bc3" containerName="neutron-httpd" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.233395 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1f68ebd-adfb-406e-ac16-d14599ea9bc3" containerName="neutron-httpd" Dec 05 06:12:46 crc kubenswrapper[4742]: E1205 06:12:46.233409 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1170877-6a94-478f-b6de-75fc8dd2c13e" containerName="barbican-api-log" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.233416 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1170877-6a94-478f-b6de-75fc8dd2c13e" containerName="barbican-api-log" Dec 05 06:12:46 crc kubenswrapper[4742]: E1205 06:12:46.233432 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1f68ebd-adfb-406e-ac16-d14599ea9bc3" containerName="neutron-api" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.233438 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1f68ebd-adfb-406e-ac16-d14599ea9bc3" containerName="neutron-api" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.233599 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1170877-6a94-478f-b6de-75fc8dd2c13e" containerName="barbican-api-log" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.233616 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1170877-6a94-478f-b6de-75fc8dd2c13e" containerName="barbican-api" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.233629 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1f68ebd-adfb-406e-ac16-d14599ea9bc3" containerName="neutron-api" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.233641 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1f68ebd-adfb-406e-ac16-d14599ea9bc3" containerName="neutron-httpd" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.234165 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.237583 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.237613 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-89fpc" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.237634 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.253478 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.320695 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-openstack-config\") pod \"openstackclient\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " pod="openstack/openstackclient" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.320968 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " pod="openstack/openstackclient" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.321195 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-openstack-config-secret\") pod \"openstackclient\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " pod="openstack/openstackclient" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.321315 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brmjs\" (UniqueName: \"kubernetes.io/projected/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-kube-api-access-brmjs\") pod \"openstackclient\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " pod="openstack/openstackclient" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.423319 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " pod="openstack/openstackclient" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.424511 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-openstack-config-secret\") pod \"openstackclient\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " pod="openstack/openstackclient" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.424626 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brmjs\" (UniqueName: \"kubernetes.io/projected/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-kube-api-access-brmjs\") pod \"openstackclient\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " pod="openstack/openstackclient" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.425096 4742 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-openstack-config\") pod \"openstackclient\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " pod="openstack/openstackclient" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.425960 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-openstack-config\") pod \"openstackclient\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " pod="openstack/openstackclient" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.431539 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-openstack-config-secret\") pod \"openstackclient\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " pod="openstack/openstackclient" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.431838 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " pod="openstack/openstackclient" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.445928 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brmjs\" (UniqueName: \"kubernetes.io/projected/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-kube-api-access-brmjs\") pod \"openstackclient\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " pod="openstack/openstackclient" Dec 05 06:12:46 crc kubenswrapper[4742]: I1205 06:12:46.550479 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.088457 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.124577 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.124916 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="ceilometer-central-agent" containerID="cri-o://01a430e7ee682bd0558d05549cc5337e93ae20e189353815e2f64ec224b76659" gracePeriod=30 Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.124959 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="sg-core" containerID="cri-o://2803d832cdfff451b43b9a925d99698f58c41069d927a523197d9ed45511172c" gracePeriod=30 Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.125030 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="proxy-httpd" containerID="cri-o://e0c246cda86a7ce8a124ffb6eef34b61f5249c7fea70d6d230faadeabcf73213" gracePeriod=30 Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.125050 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="ceilometer-notification-agent" containerID="cri-o://37d4718bc018b00d8f844cd6925baeee526cbc305d9522ef98c7b9906946f6f4" gracePeriod=30 Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.153883 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.161:3000/\": EOF" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.397774 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-5f7476cfc7-5r2mm"] Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.399528 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.402494 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.402751 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.402904 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.471115 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5f7476cfc7-5r2mm"] Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.481911 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef1f42c-4004-49d9-9456-4d4df074004f-log-httpd\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.481978 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-combined-ca-bundle\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.482016 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8ef1f42c-4004-49d9-9456-4d4df074004f-etc-swift\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.482042 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-config-data\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.482086 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef1f42c-4004-49d9-9456-4d4df074004f-run-httpd\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.482201 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-public-tls-certs\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.482265 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-internal-tls-certs\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " 
pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.482315 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwsbx\" (UniqueName: \"kubernetes.io/projected/8ef1f42c-4004-49d9-9456-4d4df074004f-kube-api-access-zwsbx\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.583280 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef1f42c-4004-49d9-9456-4d4df074004f-log-httpd\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.584252 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-combined-ca-bundle\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.584365 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8ef1f42c-4004-49d9-9456-4d4df074004f-etc-swift\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.584474 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-config-data\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.584573 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef1f42c-4004-49d9-9456-4d4df074004f-run-httpd\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.584768 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-public-tls-certs\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.584886 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-internal-tls-certs\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.584989 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwsbx\" (UniqueName: \"kubernetes.io/projected/8ef1f42c-4004-49d9-9456-4d4df074004f-kube-api-access-zwsbx\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " 
pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.583757 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef1f42c-4004-49d9-9456-4d4df074004f-log-httpd\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.585562 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef1f42c-4004-49d9-9456-4d4df074004f-run-httpd\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.591011 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-internal-tls-certs\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.591090 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-config-data\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.591319 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8ef1f42c-4004-49d9-9456-4d4df074004f-etc-swift\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.592298 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-combined-ca-bundle\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.592882 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-public-tls-certs\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.604098 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwsbx\" (UniqueName: \"kubernetes.io/projected/8ef1f42c-4004-49d9-9456-4d4df074004f-kube-api-access-zwsbx\") pod \"swift-proxy-5f7476cfc7-5r2mm\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") " pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:47 crc kubenswrapper[4742]: I1205 06:12:47.717201 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:48 crc kubenswrapper[4742]: I1205 06:12:48.001513 4742 generic.go:334] "Generic (PLEG): container finished" podID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerID="e0c246cda86a7ce8a124ffb6eef34b61f5249c7fea70d6d230faadeabcf73213" exitCode=0 Dec 05 06:12:48 crc kubenswrapper[4742]: I1205 06:12:48.001895 4742 generic.go:334] "Generic (PLEG): container finished" podID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerID="2803d832cdfff451b43b9a925d99698f58c41069d927a523197d9ed45511172c" exitCode=2 Dec 05 06:12:48 crc kubenswrapper[4742]: I1205 06:12:48.001908 4742 generic.go:334] "Generic (PLEG): container finished" podID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerID="01a430e7ee682bd0558d05549cc5337e93ae20e189353815e2f64ec224b76659" exitCode=0 Dec 05 06:12:48 crc kubenswrapper[4742]: I1205 06:12:48.001964 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c75d285-a5c8-46f4-9a02-46bca7a81694","Type":"ContainerDied","Data":"e0c246cda86a7ce8a124ffb6eef34b61f5249c7fea70d6d230faadeabcf73213"} Dec 05 06:12:48 crc kubenswrapper[4742]: I1205 06:12:48.001997 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c75d285-a5c8-46f4-9a02-46bca7a81694","Type":"ContainerDied","Data":"2803d832cdfff451b43b9a925d99698f58c41069d927a523197d9ed45511172c"} Dec 05 06:12:48 crc kubenswrapper[4742]: I1205 06:12:48.002010 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c75d285-a5c8-46f4-9a02-46bca7a81694","Type":"ContainerDied","Data":"01a430e7ee682bd0558d05549cc5337e93ae20e189353815e2f64ec224b76659"} Dec 05 06:12:48 crc kubenswrapper[4742]: I1205 06:12:48.003754 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"7f1e3dac-5031-4dfe-815c-1c1b447f0d64","Type":"ContainerStarted","Data":"79a4e02e665ae390cda5eaaf81a5ac029d10ea4cb5cd7af08162f99d7f86b7e0"} Dec 05 06:12:48 crc kubenswrapper[4742]: I1205 06:12:48.290987 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5f7476cfc7-5r2mm"] Dec 05 06:12:48 crc kubenswrapper[4742]: W1205 06:12:48.304767 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ef1f42c_4004_49d9_9456_4d4df074004f.slice/crio-9319b3b7ddd9e01160e5f745add76719f3f9fa58204f0f144e712e1cdfea8080 WatchSource:0}: Error finding container 9319b3b7ddd9e01160e5f745add76719f3f9fa58204f0f144e712e1cdfea8080: Status 404 returned error can't find the container with id 9319b3b7ddd9e01160e5f745add76719f3f9fa58204f0f144e712e1cdfea8080 Dec 05 06:12:49 crc kubenswrapper[4742]: I1205 06:12:49.017851 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" event={"ID":"8ef1f42c-4004-49d9-9456-4d4df074004f","Type":"ContainerStarted","Data":"26e1bb24efd752b2c3019b0bfc4555cbdbfc083437d9055e783ed7089c77d920"} Dec 05 06:12:49 crc kubenswrapper[4742]: I1205 06:12:49.018231 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" event={"ID":"8ef1f42c-4004-49d9-9456-4d4df074004f","Type":"ContainerStarted","Data":"5b04102935046122a7f13426fa065fb74c80fe74f085c0c16a9aab6c2234ef7c"} Dec 05 06:12:49 crc kubenswrapper[4742]: I1205 06:12:49.018264 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 
06:12:49 crc kubenswrapper[4742]: I1205 06:12:49.018282 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" event={"ID":"8ef1f42c-4004-49d9-9456-4d4df074004f","Type":"ContainerStarted","Data":"9319b3b7ddd9e01160e5f745add76719f3f9fa58204f0f144e712e1cdfea8080"} Dec 05 06:12:49 crc kubenswrapper[4742]: I1205 06:12:49.018308 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:49 crc kubenswrapper[4742]: I1205 06:12:49.051903 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" podStartSLOduration=2.051875251 podStartE2EDuration="2.051875251s" podCreationTimestamp="2025-12-05 06:12:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:12:49.045201214 +0000 UTC m=+1244.957336316" watchObservedRunningTime="2025-12-05 06:12:49.051875251 +0000 UTC m=+1244.964010323" Dec 05 06:12:52 crc kubenswrapper[4742]: I1205 06:12:52.048535 4742 generic.go:334] "Generic (PLEG): container finished" podID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerID="37d4718bc018b00d8f844cd6925baeee526cbc305d9522ef98c7b9906946f6f4" exitCode=0 Dec 05 06:12:52 crc kubenswrapper[4742]: I1205 06:12:52.048681 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c75d285-a5c8-46f4-9a02-46bca7a81694","Type":"ContainerDied","Data":"37d4718bc018b00d8f844cd6925baeee526cbc305d9522ef98c7b9906946f6f4"} Dec 05 06:12:54 crc kubenswrapper[4742]: I1205 06:12:54.069152 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.161:3000/\": dial tcp 10.217.0.161:3000: connect: connection refused" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.154448 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.259199 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-combined-ca-bundle\") pod \"1c75d285-a5c8-46f4-9a02-46bca7a81694\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.259284 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-config-data\") pod \"1c75d285-a5c8-46f4-9a02-46bca7a81694\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.259436 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-sg-core-conf-yaml\") pod \"1c75d285-a5c8-46f4-9a02-46bca7a81694\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.259461 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-scripts\") pod \"1c75d285-a5c8-46f4-9a02-46bca7a81694\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.259510 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8ldd\" (UniqueName: \"kubernetes.io/projected/1c75d285-a5c8-46f4-9a02-46bca7a81694-kube-api-access-v8ldd\") pod \"1c75d285-a5c8-46f4-9a02-46bca7a81694\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.260008 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c75d285-a5c8-46f4-9a02-46bca7a81694-run-httpd\") pod \"1c75d285-a5c8-46f4-9a02-46bca7a81694\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.260041 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c75d285-a5c8-46f4-9a02-46bca7a81694-log-httpd\") pod \"1c75d285-a5c8-46f4-9a02-46bca7a81694\" (UID: \"1c75d285-a5c8-46f4-9a02-46bca7a81694\") " Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.260330 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c75d285-a5c8-46f4-9a02-46bca7a81694-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1c75d285-a5c8-46f4-9a02-46bca7a81694" (UID: "1c75d285-a5c8-46f4-9a02-46bca7a81694"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.260450 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c75d285-a5c8-46f4-9a02-46bca7a81694-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1c75d285-a5c8-46f4-9a02-46bca7a81694" (UID: "1c75d285-a5c8-46f4-9a02-46bca7a81694"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.260816 4742 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c75d285-a5c8-46f4-9a02-46bca7a81694-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.260829 4742 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c75d285-a5c8-46f4-9a02-46bca7a81694-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.265348 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-scripts" (OuterVolumeSpecName: "scripts") pod "1c75d285-a5c8-46f4-9a02-46bca7a81694" (UID: "1c75d285-a5c8-46f4-9a02-46bca7a81694"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.266004 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c75d285-a5c8-46f4-9a02-46bca7a81694-kube-api-access-v8ldd" (OuterVolumeSpecName: "kube-api-access-v8ldd") pod "1c75d285-a5c8-46f4-9a02-46bca7a81694" (UID: "1c75d285-a5c8-46f4-9a02-46bca7a81694"). InnerVolumeSpecName "kube-api-access-v8ldd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.300972 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1c75d285-a5c8-46f4-9a02-46bca7a81694" (UID: "1c75d285-a5c8-46f4-9a02-46bca7a81694"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.350195 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c75d285-a5c8-46f4-9a02-46bca7a81694" (UID: "1c75d285-a5c8-46f4-9a02-46bca7a81694"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.362504 4742 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.362553 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.362571 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8ldd\" (UniqueName: \"kubernetes.io/projected/1c75d285-a5c8-46f4-9a02-46bca7a81694-kube-api-access-v8ldd\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.362591 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.373016 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-config-data" (OuterVolumeSpecName: "config-data") pod "1c75d285-a5c8-46f4-9a02-46bca7a81694" (UID: "1c75d285-a5c8-46f4-9a02-46bca7a81694"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.464368 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c75d285-a5c8-46f4-9a02-46bca7a81694-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.724755 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:57 crc kubenswrapper[4742]: I1205 06:12:57.732934 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.104576 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c75d285-a5c8-46f4-9a02-46bca7a81694","Type":"ContainerDied","Data":"585f9c09cfa6ed9b126910a041f6f497f567720b338d4a73224fb5f32aead6cd"} Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.104607 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.104684 4742 scope.go:117] "RemoveContainer" containerID="e0c246cda86a7ce8a124ffb6eef34b61f5249c7fea70d6d230faadeabcf73213" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.107173 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"7f1e3dac-5031-4dfe-815c-1c1b447f0d64","Type":"ContainerStarted","Data":"0b24c5bc7a890ac19ac54b2ce282a48fb159a89cb8f1121185c3c1f4ebc77ba4"} Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.122177 4742 scope.go:117] "RemoveContainer" containerID="2803d832cdfff451b43b9a925d99698f58c41069d927a523197d9ed45511172c" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.134157 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.341406084 podStartE2EDuration="12.134137187s" podCreationTimestamp="2025-12-05 06:12:46 +0000 UTC" firstStartedPulling="2025-12-05 06:12:47.095019975 +0000 UTC m=+1243.007155037" lastFinishedPulling="2025-12-05 06:12:56.887751078 +0000 UTC m=+1252.799886140" observedRunningTime="2025-12-05 06:12:58.1334924 +0000 UTC m=+1254.045627492" watchObservedRunningTime="2025-12-05 06:12:58.134137187 +0000 UTC m=+1254.046272249" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.141736 4742 scope.go:117] "RemoveContainer" containerID="37d4718bc018b00d8f844cd6925baeee526cbc305d9522ef98c7b9906946f6f4" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.163231 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.168939 4742 scope.go:117] "RemoveContainer" containerID="01a430e7ee682bd0558d05549cc5337e93ae20e189353815e2f64ec224b76659" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.173934 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.181522 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:12:58 crc kubenswrapper[4742]: E1205 06:12:58.181827 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="ceilometer-central-agent" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.181843 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="ceilometer-central-agent" Dec 05 06:12:58 crc kubenswrapper[4742]: E1205 06:12:58.181865 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="proxy-httpd" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.181872 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="proxy-httpd" Dec 05 06:12:58 crc kubenswrapper[4742]: E1205 06:12:58.181892 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="sg-core" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.181897 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="sg-core" Dec 05 06:12:58 crc kubenswrapper[4742]: E1205 06:12:58.181916 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="ceilometer-notification-agent" Dec 05 06:12:58 crc 
kubenswrapper[4742]: I1205 06:12:58.181924 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="ceilometer-notification-agent" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.182085 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="sg-core" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.182098 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="proxy-httpd" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.182110 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="ceilometer-central-agent" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.182121 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" containerName="ceilometer-notification-agent" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.183503 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.188835 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.189250 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.224099 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.277322 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5czxs\" (UniqueName: \"kubernetes.io/projected/7cc340c5-1842-4b38-af61-4cc7f24b6118-kube-api-access-5czxs\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.277384 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7cc340c5-1842-4b38-af61-4cc7f24b6118-run-httpd\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.277414 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-config-data\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.277501 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.277529 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7cc340c5-1842-4b38-af61-4cc7f24b6118-log-httpd\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc 
kubenswrapper[4742]: I1205 06:12:58.277611 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.277651 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-scripts\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.379225 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7cc340c5-1842-4b38-af61-4cc7f24b6118-log-httpd\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.379783 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.379880 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-scripts\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.379702 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7cc340c5-1842-4b38-af61-4cc7f24b6118-log-httpd\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.380065 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5czxs\" (UniqueName: \"kubernetes.io/projected/7cc340c5-1842-4b38-af61-4cc7f24b6118-kube-api-access-5czxs\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.380142 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7cc340c5-1842-4b38-af61-4cc7f24b6118-run-httpd\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.380226 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-config-data\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.380339 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc 
kubenswrapper[4742]: I1205 06:12:58.380546 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7cc340c5-1842-4b38-af61-4cc7f24b6118-run-httpd\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.383665 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-scripts\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.383941 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-config-data\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.389670 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.391573 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.393668 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c75d285-a5c8-46f4-9a02-46bca7a81694" path="/var/lib/kubelet/pods/1c75d285-a5c8-46f4-9a02-46bca7a81694/volumes" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.411209 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5czxs\" (UniqueName: \"kubernetes.io/projected/7cc340c5-1842-4b38-af61-4cc7f24b6118-kube-api-access-5czxs\") pod \"ceilometer-0\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.513963 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:12:58 crc kubenswrapper[4742]: I1205 06:12:58.981507 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:12:59 crc kubenswrapper[4742]: I1205 06:12:59.115100 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7cc340c5-1842-4b38-af61-4cc7f24b6118","Type":"ContainerStarted","Data":"966fe8f06e102d26d475b47e5469cfb31263364443460cbabf3b0f270a2dac94"} Dec 05 06:13:00 crc kubenswrapper[4742]: I1205 06:13:00.125253 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7cc340c5-1842-4b38-af61-4cc7f24b6118","Type":"ContainerStarted","Data":"c80302b78ebba3e57bc4582784ddd64945c60da2c78c331f822f74376b24cf37"} Dec 05 06:13:00 crc kubenswrapper[4742]: I1205 06:13:00.256288 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:13:00 crc kubenswrapper[4742]: I1205 06:13:00.665328 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:13:00 crc kubenswrapper[4742]: I1205 06:13:00.665779 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a3450678-40d0-44f3-bcd6-c9d5b773812b" containerName="glance-log" containerID="cri-o://911e97f444607219414c7206dea5e324e9af39fb5fc4cbb23881ea0ab567c41c" gracePeriod=30 Dec 05 06:13:00 crc kubenswrapper[4742]: I1205 06:13:00.665860 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="a3450678-40d0-44f3-bcd6-c9d5b773812b" containerName="glance-httpd" containerID="cri-o://d77cb7753d933ccde9dd454627520ab2dfcecd93ead766179bb9b76c07274bed" gracePeriod=30 Dec 05 06:13:01 crc kubenswrapper[4742]: I1205 06:13:01.141503 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7cc340c5-1842-4b38-af61-4cc7f24b6118","Type":"ContainerStarted","Data":"4bb2baaec185e8417c830da7f44f14ec9002a157e466150af2923ab43a1992e3"} Dec 05 06:13:01 crc kubenswrapper[4742]: I1205 06:13:01.141844 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7cc340c5-1842-4b38-af61-4cc7f24b6118","Type":"ContainerStarted","Data":"a3a7c965f36083dda1ad293969a949eb60bda045103f9b40ecc66e28cd7143d3"} Dec 05 06:13:01 crc kubenswrapper[4742]: I1205 06:13:01.143353 4742 generic.go:334] "Generic (PLEG): container finished" podID="a3450678-40d0-44f3-bcd6-c9d5b773812b" containerID="911e97f444607219414c7206dea5e324e9af39fb5fc4cbb23881ea0ab567c41c" exitCode=143 Dec 05 06:13:01 crc kubenswrapper[4742]: I1205 06:13:01.143380 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a3450678-40d0-44f3-bcd6-c9d5b773812b","Type":"ContainerDied","Data":"911e97f444607219414c7206dea5e324e9af39fb5fc4cbb23881ea0ab567c41c"} Dec 05 06:13:03 crc kubenswrapper[4742]: I1205 06:13:03.162421 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7cc340c5-1842-4b38-af61-4cc7f24b6118","Type":"ContainerStarted","Data":"79c48523795f949a3e88f948381417c01fc32793400c7098935ffcf6596380d5"} Dec 05 06:13:03 crc kubenswrapper[4742]: I1205 06:13:03.162784 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="ceilometer-central-agent" 
containerID="cri-o://c80302b78ebba3e57bc4582784ddd64945c60da2c78c331f822f74376b24cf37" gracePeriod=30 Dec 05 06:13:03 crc kubenswrapper[4742]: I1205 06:13:03.162952 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="proxy-httpd" containerID="cri-o://79c48523795f949a3e88f948381417c01fc32793400c7098935ffcf6596380d5" gracePeriod=30 Dec 05 06:13:03 crc kubenswrapper[4742]: I1205 06:13:03.162992 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 06:13:03 crc kubenswrapper[4742]: I1205 06:13:03.163091 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="sg-core" containerID="cri-o://4bb2baaec185e8417c830da7f44f14ec9002a157e466150af2923ab43a1992e3" gracePeriod=30 Dec 05 06:13:03 crc kubenswrapper[4742]: I1205 06:13:03.163125 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="ceilometer-notification-agent" containerID="cri-o://a3a7c965f36083dda1ad293969a949eb60bda045103f9b40ecc66e28cd7143d3" gracePeriod=30 Dec 05 06:13:03 crc kubenswrapper[4742]: I1205 06:13:03.192579 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.2764376410000002 podStartE2EDuration="5.192560999s" podCreationTimestamp="2025-12-05 06:12:58 +0000 UTC" firstStartedPulling="2025-12-05 06:12:58.98827651 +0000 UTC m=+1254.900411572" lastFinishedPulling="2025-12-05 06:13:01.904399858 +0000 UTC m=+1257.816534930" observedRunningTime="2025-12-05 06:13:03.185569153 +0000 UTC m=+1259.097704215" watchObservedRunningTime="2025-12-05 06:13:03.192560999 +0000 UTC m=+1259.104696061" Dec 05 06:13:03 crc kubenswrapper[4742]: I1205 06:13:03.804525 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="a3450678-40d0-44f3-bcd6-c9d5b773812b" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.150:9292/healthcheck\": read tcp 10.217.0.2:51482->10.217.0.150:9292: read: connection reset by peer" Dec 05 06:13:03 crc kubenswrapper[4742]: I1205 06:13:03.804541 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="a3450678-40d0-44f3-bcd6-c9d5b773812b" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.150:9292/healthcheck\": read tcp 10.217.0.2:51486->10.217.0.150:9292: read: connection reset by peer" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.176099 4742 generic.go:334] "Generic (PLEG): container finished" podID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerID="79c48523795f949a3e88f948381417c01fc32793400c7098935ffcf6596380d5" exitCode=0 Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.176632 4742 generic.go:334] "Generic (PLEG): container finished" podID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerID="4bb2baaec185e8417c830da7f44f14ec9002a157e466150af2923ab43a1992e3" exitCode=2 Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.176186 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7cc340c5-1842-4b38-af61-4cc7f24b6118","Type":"ContainerDied","Data":"79c48523795f949a3e88f948381417c01fc32793400c7098935ffcf6596380d5"} Dec 05 06:13:04 crc 
kubenswrapper[4742]: I1205 06:13:04.176705 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7cc340c5-1842-4b38-af61-4cc7f24b6118","Type":"ContainerDied","Data":"4bb2baaec185e8417c830da7f44f14ec9002a157e466150af2923ab43a1992e3"} Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.176651 4742 generic.go:334] "Generic (PLEG): container finished" podID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerID="a3a7c965f36083dda1ad293969a949eb60bda045103f9b40ecc66e28cd7143d3" exitCode=0 Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.176728 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7cc340c5-1842-4b38-af61-4cc7f24b6118","Type":"ContainerDied","Data":"a3a7c965f36083dda1ad293969a949eb60bda045103f9b40ecc66e28cd7143d3"} Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.180388 4742 generic.go:334] "Generic (PLEG): container finished" podID="a3450678-40d0-44f3-bcd6-c9d5b773812b" containerID="d77cb7753d933ccde9dd454627520ab2dfcecd93ead766179bb9b76c07274bed" exitCode=0 Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.180435 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a3450678-40d0-44f3-bcd6-c9d5b773812b","Type":"ContainerDied","Data":"d77cb7753d933ccde9dd454627520ab2dfcecd93ead766179bb9b76c07274bed"} Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.330068 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.390827 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a3450678-40d0-44f3-bcd6-c9d5b773812b-httpd-run\") pod \"a3450678-40d0-44f3-bcd6-c9d5b773812b\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.391192 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9f7d\" (UniqueName: \"kubernetes.io/projected/a3450678-40d0-44f3-bcd6-c9d5b773812b-kube-api-access-g9f7d\") pod \"a3450678-40d0-44f3-bcd6-c9d5b773812b\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.391247 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-scripts\") pod \"a3450678-40d0-44f3-bcd6-c9d5b773812b\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.391277 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-config-data\") pod \"a3450678-40d0-44f3-bcd6-c9d5b773812b\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.391434 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3450678-40d0-44f3-bcd6-c9d5b773812b-logs\") pod \"a3450678-40d0-44f3-bcd6-c9d5b773812b\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.391531 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod 
\"a3450678-40d0-44f3-bcd6-c9d5b773812b\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.391577 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-public-tls-certs\") pod \"a3450678-40d0-44f3-bcd6-c9d5b773812b\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.391640 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-combined-ca-bundle\") pod \"a3450678-40d0-44f3-bcd6-c9d5b773812b\" (UID: \"a3450678-40d0-44f3-bcd6-c9d5b773812b\") " Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.392095 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3450678-40d0-44f3-bcd6-c9d5b773812b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a3450678-40d0-44f3-bcd6-c9d5b773812b" (UID: "a3450678-40d0-44f3-bcd6-c9d5b773812b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.392173 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3450678-40d0-44f3-bcd6-c9d5b773812b-logs" (OuterVolumeSpecName: "logs") pod "a3450678-40d0-44f3-bcd6-c9d5b773812b" (UID: "a3450678-40d0-44f3-bcd6-c9d5b773812b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.392329 4742 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a3450678-40d0-44f3-bcd6-c9d5b773812b-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.392352 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3450678-40d0-44f3-bcd6-c9d5b773812b-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.397329 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-scripts" (OuterVolumeSpecName: "scripts") pod "a3450678-40d0-44f3-bcd6-c9d5b773812b" (UID: "a3450678-40d0-44f3-bcd6-c9d5b773812b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.414673 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "a3450678-40d0-44f3-bcd6-c9d5b773812b" (UID: "a3450678-40d0-44f3-bcd6-c9d5b773812b"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.440842 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3450678-40d0-44f3-bcd6-c9d5b773812b-kube-api-access-g9f7d" (OuterVolumeSpecName: "kube-api-access-g9f7d") pod "a3450678-40d0-44f3-bcd6-c9d5b773812b" (UID: "a3450678-40d0-44f3-bcd6-c9d5b773812b"). InnerVolumeSpecName "kube-api-access-g9f7d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.474957 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a3450678-40d0-44f3-bcd6-c9d5b773812b" (UID: "a3450678-40d0-44f3-bcd6-c9d5b773812b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.486514 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a3450678-40d0-44f3-bcd6-c9d5b773812b" (UID: "a3450678-40d0-44f3-bcd6-c9d5b773812b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.496139 4742 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.496575 4742 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.496669 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.496727 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9f7d\" (UniqueName: \"kubernetes.io/projected/a3450678-40d0-44f3-bcd6-c9d5b773812b-kube-api-access-g9f7d\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.496788 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.509032 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-config-data" (OuterVolumeSpecName: "config-data") pod "a3450678-40d0-44f3-bcd6-c9d5b773812b" (UID: "a3450678-40d0-44f3-bcd6-c9d5b773812b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.525113 4742 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.598321 4742 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.598530 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3450678-40d0-44f3-bcd6-c9d5b773812b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.781248 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-dzkx8"] Dec 05 06:13:04 crc kubenswrapper[4742]: E1205 06:13:04.781576 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3450678-40d0-44f3-bcd6-c9d5b773812b" containerName="glance-log" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.781592 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3450678-40d0-44f3-bcd6-c9d5b773812b" containerName="glance-log" Dec 05 06:13:04 crc kubenswrapper[4742]: E1205 06:13:04.781606 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3450678-40d0-44f3-bcd6-c9d5b773812b" containerName="glance-httpd" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.781612 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3450678-40d0-44f3-bcd6-c9d5b773812b" containerName="glance-httpd" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.781779 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3450678-40d0-44f3-bcd6-c9d5b773812b" containerName="glance-log" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.781805 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3450678-40d0-44f3-bcd6-c9d5b773812b" containerName="glance-httpd" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.782325 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-dzkx8" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.796108 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-dzkx8"] Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.904763 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xl7d\" (UniqueName: \"kubernetes.io/projected/24b6ea24-e4d2-42f2-8a10-720d0a3445e4-kube-api-access-4xl7d\") pod \"nova-api-db-create-dzkx8\" (UID: \"24b6ea24-e4d2-42f2-8a10-720d0a3445e4\") " pod="openstack/nova-api-db-create-dzkx8" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.904887 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24b6ea24-e4d2-42f2-8a10-720d0a3445e4-operator-scripts\") pod \"nova-api-db-create-dzkx8\" (UID: \"24b6ea24-e4d2-42f2-8a10-720d0a3445e4\") " pod="openstack/nova-api-db-create-dzkx8" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.941111 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-e0d5-account-create-update-7vcpx"] Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.943182 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-e0d5-account-create-update-7vcpx" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.946151 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 05 06:13:04 crc kubenswrapper[4742]: I1205 06:13:04.974259 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-e0d5-account-create-update-7vcpx"] Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.008036 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xl7d\" (UniqueName: \"kubernetes.io/projected/24b6ea24-e4d2-42f2-8a10-720d0a3445e4-kube-api-access-4xl7d\") pod \"nova-api-db-create-dzkx8\" (UID: \"24b6ea24-e4d2-42f2-8a10-720d0a3445e4\") " pod="openstack/nova-api-db-create-dzkx8" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.008103 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77998b8e-507c-487f-8616-6fe17b8f9d9a-operator-scripts\") pod \"nova-api-e0d5-account-create-update-7vcpx\" (UID: \"77998b8e-507c-487f-8616-6fe17b8f9d9a\") " pod="openstack/nova-api-e0d5-account-create-update-7vcpx" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.008183 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24b6ea24-e4d2-42f2-8a10-720d0a3445e4-operator-scripts\") pod \"nova-api-db-create-dzkx8\" (UID: \"24b6ea24-e4d2-42f2-8a10-720d0a3445e4\") " pod="openstack/nova-api-db-create-dzkx8" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.008220 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7zgz\" (UniqueName: \"kubernetes.io/projected/77998b8e-507c-487f-8616-6fe17b8f9d9a-kube-api-access-x7zgz\") pod \"nova-api-e0d5-account-create-update-7vcpx\" (UID: \"77998b8e-507c-487f-8616-6fe17b8f9d9a\") " pod="openstack/nova-api-e0d5-account-create-update-7vcpx" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.008922 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24b6ea24-e4d2-42f2-8a10-720d0a3445e4-operator-scripts\") pod \"nova-api-db-create-dzkx8\" (UID: \"24b6ea24-e4d2-42f2-8a10-720d0a3445e4\") " pod="openstack/nova-api-db-create-dzkx8" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.024849 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xl7d\" (UniqueName: \"kubernetes.io/projected/24b6ea24-e4d2-42f2-8a10-720d0a3445e4-kube-api-access-4xl7d\") pod \"nova-api-db-create-dzkx8\" (UID: \"24b6ea24-e4d2-42f2-8a10-720d0a3445e4\") " pod="openstack/nova-api-db-create-dzkx8" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.082400 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-25s6b"] Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.083787 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-25s6b" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.089541 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-36a3-account-create-update-ltscs"] Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.090529 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-36a3-account-create-update-ltscs" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.096716 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-dzkx8" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.100100 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.114844 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77998b8e-507c-487f-8616-6fe17b8f9d9a-operator-scripts\") pod \"nova-api-e0d5-account-create-update-7vcpx\" (UID: \"77998b8e-507c-487f-8616-6fe17b8f9d9a\") " pod="openstack/nova-api-e0d5-account-create-update-7vcpx" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.115236 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7zgz\" (UniqueName: \"kubernetes.io/projected/77998b8e-507c-487f-8616-6fe17b8f9d9a-kube-api-access-x7zgz\") pod \"nova-api-e0d5-account-create-update-7vcpx\" (UID: \"77998b8e-507c-487f-8616-6fe17b8f9d9a\") " pod="openstack/nova-api-e0d5-account-create-update-7vcpx" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.116121 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-36a3-account-create-update-ltscs"] Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.116314 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77998b8e-507c-487f-8616-6fe17b8f9d9a-operator-scripts\") pod \"nova-api-e0d5-account-create-update-7vcpx\" (UID: \"77998b8e-507c-487f-8616-6fe17b8f9d9a\") " pod="openstack/nova-api-e0d5-account-create-update-7vcpx" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.129788 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-25s6b"] Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.132849 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7zgz\" (UniqueName: 
\"kubernetes.io/projected/77998b8e-507c-487f-8616-6fe17b8f9d9a-kube-api-access-x7zgz\") pod \"nova-api-e0d5-account-create-update-7vcpx\" (UID: \"77998b8e-507c-487f-8616-6fe17b8f9d9a\") " pod="openstack/nova-api-e0d5-account-create-update-7vcpx" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.179587 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-ks5x5"] Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.189543 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-ks5x5" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.198630 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-ks5x5"] Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.211480 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"a3450678-40d0-44f3-bcd6-c9d5b773812b","Type":"ContainerDied","Data":"dd6245598b0f9e8faf90c1422b43b1bf0fe55d37d9cee58ab9a62ae667550f33"} Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.211525 4742 scope.go:117] "RemoveContainer" containerID="d77cb7753d933ccde9dd454627520ab2dfcecd93ead766179bb9b76c07274bed" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.211697 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.217016 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-967bw\" (UniqueName: \"kubernetes.io/projected/3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f-kube-api-access-967bw\") pod \"nova-cell0-36a3-account-create-update-ltscs\" (UID: \"3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f\") " pod="openstack/nova-cell0-36a3-account-create-update-ltscs" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.217300 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f-operator-scripts\") pod \"nova-cell0-36a3-account-create-update-ltscs\" (UID: \"3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f\") " pod="openstack/nova-cell0-36a3-account-create-update-ltscs" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.217480 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ljlg\" (UniqueName: \"kubernetes.io/projected/4341c972-cfe7-4940-ad91-1f8a4d6138ab-kube-api-access-2ljlg\") pod \"nova-cell0-db-create-25s6b\" (UID: \"4341c972-cfe7-4940-ad91-1f8a4d6138ab\") " pod="openstack/nova-cell0-db-create-25s6b" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.217554 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4341c972-cfe7-4940-ad91-1f8a4d6138ab-operator-scripts\") pod \"nova-cell0-db-create-25s6b\" (UID: \"4341c972-cfe7-4940-ad91-1f8a4d6138ab\") " pod="openstack/nova-cell0-db-create-25s6b" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.287319 4742 scope.go:117] "RemoveContainer" containerID="911e97f444607219414c7206dea5e324e9af39fb5fc4cbb23881ea0ab567c41c" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.292375 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 
06:13:05.301390 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-e0d5-account-create-update-7vcpx" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.301877 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.315738 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.318471 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.318880 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw4p6\" (UniqueName: \"kubernetes.io/projected/59e2f35a-a430-4187-b30a-43a8f1872d9f-kube-api-access-tw4p6\") pod \"nova-cell1-db-create-ks5x5\" (UID: \"59e2f35a-a430-4187-b30a-43a8f1872d9f\") " pod="openstack/nova-cell1-db-create-ks5x5" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.318936 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ljlg\" (UniqueName: \"kubernetes.io/projected/4341c972-cfe7-4940-ad91-1f8a4d6138ab-kube-api-access-2ljlg\") pod \"nova-cell0-db-create-25s6b\" (UID: \"4341c972-cfe7-4940-ad91-1f8a4d6138ab\") " pod="openstack/nova-cell0-db-create-25s6b" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.318957 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4341c972-cfe7-4940-ad91-1f8a4d6138ab-operator-scripts\") pod \"nova-cell0-db-create-25s6b\" (UID: \"4341c972-cfe7-4940-ad91-1f8a4d6138ab\") " pod="openstack/nova-cell0-db-create-25s6b" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.319010 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-967bw\" (UniqueName: \"kubernetes.io/projected/3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f-kube-api-access-967bw\") pod \"nova-cell0-36a3-account-create-update-ltscs\" (UID: \"3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f\") " pod="openstack/nova-cell0-36a3-account-create-update-ltscs" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.319048 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f-operator-scripts\") pod \"nova-cell0-36a3-account-create-update-ltscs\" (UID: \"3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f\") " pod="openstack/nova-cell0-36a3-account-create-update-ltscs" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.319124 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59e2f35a-a430-4187-b30a-43a8f1872d9f-operator-scripts\") pod \"nova-cell1-db-create-ks5x5\" (UID: \"59e2f35a-a430-4187-b30a-43a8f1872d9f\") " pod="openstack/nova-cell1-db-create-ks5x5" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.319904 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f-operator-scripts\") pod \"nova-cell0-36a3-account-create-update-ltscs\" (UID: \"3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f\") " pod="openstack/nova-cell0-36a3-account-create-update-ltscs" Dec 05 
06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.320160 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4341c972-cfe7-4940-ad91-1f8a4d6138ab-operator-scripts\") pod \"nova-cell0-db-create-25s6b\" (UID: \"4341c972-cfe7-4940-ad91-1f8a4d6138ab\") " pod="openstack/nova-cell0-db-create-25s6b" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.322558 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.322872 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.343363 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ljlg\" (UniqueName: \"kubernetes.io/projected/4341c972-cfe7-4940-ad91-1f8a4d6138ab-kube-api-access-2ljlg\") pod \"nova-cell0-db-create-25s6b\" (UID: \"4341c972-cfe7-4940-ad91-1f8a4d6138ab\") " pod="openstack/nova-cell0-db-create-25s6b" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.344647 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.374860 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-967bw\" (UniqueName: \"kubernetes.io/projected/3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f-kube-api-access-967bw\") pod \"nova-cell0-36a3-account-create-update-ltscs\" (UID: \"3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f\") " pod="openstack/nova-cell0-36a3-account-create-update-ltscs" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.410204 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-25s6b" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.412197 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-2c4b-account-create-update-rcrw9"] Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.413254 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-2c4b-account-create-update-rcrw9" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.414732 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.421458 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7a764d5-447f-483d-b819-0e398e749600-logs\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.421539 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59e2f35a-a430-4187-b30a-43a8f1872d9f-operator-scripts\") pod \"nova-cell1-db-create-ks5x5\" (UID: \"59e2f35a-a430-4187-b30a-43a8f1872d9f\") " pod="openstack/nova-cell1-db-create-ks5x5" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.421581 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8fbr\" (UniqueName: \"kubernetes.io/projected/d7a764d5-447f-483d-b819-0e398e749600-kube-api-access-c8fbr\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.421600 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.421625 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw4p6\" (UniqueName: \"kubernetes.io/projected/59e2f35a-a430-4187-b30a-43a8f1872d9f-kube-api-access-tw4p6\") pod \"nova-cell1-db-create-ks5x5\" (UID: \"59e2f35a-a430-4187-b30a-43a8f1872d9f\") " pod="openstack/nova-cell1-db-create-ks5x5" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.421648 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.421679 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-scripts\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.421699 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-config-data\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.421716 4742 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.421736 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d7a764d5-447f-483d-b819-0e398e749600-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.423406 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59e2f35a-a430-4187-b30a-43a8f1872d9f-operator-scripts\") pod \"nova-cell1-db-create-ks5x5\" (UID: \"59e2f35a-a430-4187-b30a-43a8f1872d9f\") " pod="openstack/nova-cell1-db-create-ks5x5" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.423562 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-2c4b-account-create-update-rcrw9"] Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.431942 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-36a3-account-create-update-ltscs" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.449685 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw4p6\" (UniqueName: \"kubernetes.io/projected/59e2f35a-a430-4187-b30a-43a8f1872d9f-kube-api-access-tw4p6\") pod \"nova-cell1-db-create-ks5x5\" (UID: \"59e2f35a-a430-4187-b30a-43a8f1872d9f\") " pod="openstack/nova-cell1-db-create-ks5x5" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.515618 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-ks5x5" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.524824 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7a764d5-447f-483d-b819-0e398e749600-logs\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.524903 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe8a103e-e284-4ec2-b566-32a1180870c6-operator-scripts\") pod \"nova-cell1-2c4b-account-create-update-rcrw9\" (UID: \"fe8a103e-e284-4ec2-b566-32a1180870c6\") " pod="openstack/nova-cell1-2c4b-account-create-update-rcrw9" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.524926 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmd4m\" (UniqueName: \"kubernetes.io/projected/fe8a103e-e284-4ec2-b566-32a1180870c6-kube-api-access-cmd4m\") pod \"nova-cell1-2c4b-account-create-update-rcrw9\" (UID: \"fe8a103e-e284-4ec2-b566-32a1180870c6\") " pod="openstack/nova-cell1-2c4b-account-create-update-rcrw9" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.524947 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8fbr\" (UniqueName: \"kubernetes.io/projected/d7a764d5-447f-483d-b819-0e398e749600-kube-api-access-c8fbr\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.526589 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7a764d5-447f-483d-b819-0e398e749600-logs\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.535542 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.524964 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.540491 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.540537 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-scripts\") pod \"glance-default-external-api-0\" (UID: 
\"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.540553 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-config-data\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.540575 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.540599 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d7a764d5-447f-483d-b819-0e398e749600-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.540845 4742 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.540980 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d7a764d5-447f-483d-b819-0e398e749600-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.545644 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-scripts\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.552677 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8fbr\" (UniqueName: \"kubernetes.io/projected/d7a764d5-447f-483d-b819-0e398e749600-kube-api-access-c8fbr\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.558014 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-config-data\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.562916 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0" 
Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.584844 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-external-api-0\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " pod="openstack/glance-default-external-api-0"
Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.642664 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe8a103e-e284-4ec2-b566-32a1180870c6-operator-scripts\") pod \"nova-cell1-2c4b-account-create-update-rcrw9\" (UID: \"fe8a103e-e284-4ec2-b566-32a1180870c6\") " pod="openstack/nova-cell1-2c4b-account-create-update-rcrw9"
Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.642704 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmd4m\" (UniqueName: \"kubernetes.io/projected/fe8a103e-e284-4ec2-b566-32a1180870c6-kube-api-access-cmd4m\") pod \"nova-cell1-2c4b-account-create-update-rcrw9\" (UID: \"fe8a103e-e284-4ec2-b566-32a1180870c6\") " pod="openstack/nova-cell1-2c4b-account-create-update-rcrw9"
Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.643731 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe8a103e-e284-4ec2-b566-32a1180870c6-operator-scripts\") pod \"nova-cell1-2c4b-account-create-update-rcrw9\" (UID: \"fe8a103e-e284-4ec2-b566-32a1180870c6\") " pod="openstack/nova-cell1-2c4b-account-create-update-rcrw9"
Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.651132 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-dzkx8"]
Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.661181 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmd4m\" (UniqueName: \"kubernetes.io/projected/fe8a103e-e284-4ec2-b566-32a1180870c6-kube-api-access-cmd4m\") pod \"nova-cell1-2c4b-account-create-update-rcrw9\" (UID: \"fe8a103e-e284-4ec2-b566-32a1180870c6\") " pod="openstack/nova-cell1-2c4b-account-create-update-rcrw9"
Dec 05 06:13:05 crc kubenswrapper[4742]: W1205 06:13:05.662687 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod24b6ea24_e4d2_42f2_8a10_720d0a3445e4.slice/crio-25ed31014eacbde32efc3f069144578d786727582b02cb77e5b54e68177a5e12 WatchSource:0}: Error finding container 25ed31014eacbde32efc3f069144578d786727582b02cb77e5b54e68177a5e12: Status 404 returned error can't find the container with id 25ed31014eacbde32efc3f069144578d786727582b02cb77e5b54e68177a5e12
Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.828254 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.840649 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2c4b-account-create-update-rcrw9"
Dec 05 06:13:05 crc kubenswrapper[4742]: I1205 06:13:05.899075 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-e0d5-account-create-update-7vcpx"]
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.036115 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-25s6b"]
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.043230 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-36a3-account-create-update-ltscs"]
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.050175 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-ks5x5"]
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.137389 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.140311 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" containerName="glance-log" containerID="cri-o://18773fd8cfb54fd920520afc01e72e5a3ec52859012030bd87061488e8603a77" gracePeriod=30
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.140561 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" containerName="glance-httpd" containerID="cri-o://6f7180aed7ceb51843eaabd0a032e872d10e578593ffbf206208c7908ccaad69" gracePeriod=30
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.256098 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-e0d5-account-create-update-7vcpx" event={"ID":"77998b8e-507c-487f-8616-6fe17b8f9d9a","Type":"ContainerStarted","Data":"fb328515c2eaa845211b1a3bf7cd93c7e0738779e0a7f6ff99e15602b027e758"}
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.256365 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-e0d5-account-create-update-7vcpx" event={"ID":"77998b8e-507c-487f-8616-6fe17b8f9d9a","Type":"ContainerStarted","Data":"540ee4f81853a332969f1d22ee4dbcdabc0d3c8ffa8495655a9d8ec48c4017b3"}
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.268978 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-36a3-account-create-update-ltscs" event={"ID":"3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f","Type":"ContainerStarted","Data":"18d757be6719e11c423c93efbcd4e2def089fdb956e72fbefcd345266ce0b416"}
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.271090 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-25s6b" event={"ID":"4341c972-cfe7-4940-ad91-1f8a4d6138ab","Type":"ContainerStarted","Data":"b3ebad41f6b679086830d75557ac6adbaed14676432a5883e6b94420b490d762"}
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.278502 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-ks5x5" event={"ID":"59e2f35a-a430-4187-b30a-43a8f1872d9f","Type":"ContainerStarted","Data":"38eacd5fed1b03d978b69dd78bb9c855e249e02dc0ff73c07d046ac95ad82459"}
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.279517 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-e0d5-account-create-update-7vcpx" podStartSLOduration=2.279500185 podStartE2EDuration="2.279500185s" podCreationTimestamp="2025-12-05 06:13:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:13:06.277294107 +0000 UTC m=+1262.189429169" watchObservedRunningTime="2025-12-05 06:13:06.279500185 +0000 UTC m=+1262.191635237"
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.285934 4742 generic.go:334] "Generic (PLEG): container finished" podID="24b6ea24-e4d2-42f2-8a10-720d0a3445e4" containerID="40b5ce673e98b0c1b9c40ba9310397a6bb881d9fcfffb97ec613ab9a221811aa" exitCode=0
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.286035 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-dzkx8" event={"ID":"24b6ea24-e4d2-42f2-8a10-720d0a3445e4","Type":"ContainerDied","Data":"40b5ce673e98b0c1b9c40ba9310397a6bb881d9fcfffb97ec613ab9a221811aa"}
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.286177 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-dzkx8" event={"ID":"24b6ea24-e4d2-42f2-8a10-720d0a3445e4","Type":"ContainerStarted","Data":"25ed31014eacbde32efc3f069144578d786727582b02cb77e5b54e68177a5e12"}
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.404786 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3450678-40d0-44f3-bcd6-c9d5b773812b" path="/var/lib/kubelet/pods/a3450678-40d0-44f3-bcd6-c9d5b773812b/volumes"
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.495750 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-2c4b-account-create-update-rcrw9"]
Dec 05 06:13:06 crc kubenswrapper[4742]: I1205 06:13:06.504310 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.321424 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d7a764d5-447f-483d-b819-0e398e749600","Type":"ContainerStarted","Data":"f455df6d411179859e60d3c9b127100c03c9bd439f8c01b9bb223b4b2bbfd0d5"}
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.321924 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d7a764d5-447f-483d-b819-0e398e749600","Type":"ContainerStarted","Data":"7beb11964c2be510ceb27b8bbc83c23f39a9eb81f6974352e9728807725b3cf7"}
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.324890 4742 generic.go:334] "Generic (PLEG): container finished" podID="77998b8e-507c-487f-8616-6fe17b8f9d9a" containerID="fb328515c2eaa845211b1a3bf7cd93c7e0738779e0a7f6ff99e15602b027e758" exitCode=0
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.324933 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-e0d5-account-create-update-7vcpx" event={"ID":"77998b8e-507c-487f-8616-6fe17b8f9d9a","Type":"ContainerDied","Data":"fb328515c2eaa845211b1a3bf7cd93c7e0738779e0a7f6ff99e15602b027e758"}
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.330681 4742 generic.go:334] "Generic (PLEG): container finished" podID="fe8a103e-e284-4ec2-b566-32a1180870c6" containerID="f1062878d0619a2d1539b770dcb9bb0c2fd7ff348328e25b5c911ce879853948" exitCode=0
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.330765 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2c4b-account-create-update-rcrw9" event={"ID":"fe8a103e-e284-4ec2-b566-32a1180870c6","Type":"ContainerDied","Data":"f1062878d0619a2d1539b770dcb9bb0c2fd7ff348328e25b5c911ce879853948"}
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.330786 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2c4b-account-create-update-rcrw9" event={"ID":"fe8a103e-e284-4ec2-b566-32a1180870c6","Type":"ContainerStarted","Data":"320c522b636cd648bbd34fa2eb215594daadc4760194940ece0dc98cc956fd17"}
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.332864 4742 generic.go:334] "Generic (PLEG): container finished" podID="3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f" containerID="c642c9fed985ae27dd4b8a218a8892298ed31e277a17c1b94509b1c0f51a2fb9" exitCode=0
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.332936 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-36a3-account-create-update-ltscs" event={"ID":"3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f","Type":"ContainerDied","Data":"c642c9fed985ae27dd4b8a218a8892298ed31e277a17c1b94509b1c0f51a2fb9"}
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.335428 4742 generic.go:334] "Generic (PLEG): container finished" podID="4341c972-cfe7-4940-ad91-1f8a4d6138ab" containerID="305cc444eeb82b0e93ef7fa69f0ca21257483b190f06e98feae111a5d68245ed" exitCode=0
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.335488 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-25s6b" event={"ID":"4341c972-cfe7-4940-ad91-1f8a4d6138ab","Type":"ContainerDied","Data":"305cc444eeb82b0e93ef7fa69f0ca21257483b190f06e98feae111a5d68245ed"}
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.339360 4742 generic.go:334] "Generic (PLEG): container finished" podID="cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" containerID="18773fd8cfb54fd920520afc01e72e5a3ec52859012030bd87061488e8603a77" exitCode=143
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.339427 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13","Type":"ContainerDied","Data":"18773fd8cfb54fd920520afc01e72e5a3ec52859012030bd87061488e8603a77"}
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.344118 4742 generic.go:334] "Generic (PLEG): container finished" podID="59e2f35a-a430-4187-b30a-43a8f1872d9f" containerID="071226b7533691457a48d507fb4948e3f54cda1b60d42e10ff66b88d8eeb9aed" exitCode=0
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.344193 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-ks5x5" event={"ID":"59e2f35a-a430-4187-b30a-43a8f1872d9f","Type":"ContainerDied","Data":"071226b7533691457a48d507fb4948e3f54cda1b60d42e10ff66b88d8eeb9aed"}
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.773684 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-dzkx8"
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.908833 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xl7d\" (UniqueName: \"kubernetes.io/projected/24b6ea24-e4d2-42f2-8a10-720d0a3445e4-kube-api-access-4xl7d\") pod \"24b6ea24-e4d2-42f2-8a10-720d0a3445e4\" (UID: \"24b6ea24-e4d2-42f2-8a10-720d0a3445e4\") "
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.908987 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24b6ea24-e4d2-42f2-8a10-720d0a3445e4-operator-scripts\") pod \"24b6ea24-e4d2-42f2-8a10-720d0a3445e4\" (UID: \"24b6ea24-e4d2-42f2-8a10-720d0a3445e4\") "
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.909622 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24b6ea24-e4d2-42f2-8a10-720d0a3445e4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "24b6ea24-e4d2-42f2-8a10-720d0a3445e4" (UID: "24b6ea24-e4d2-42f2-8a10-720d0a3445e4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:13:07 crc kubenswrapper[4742]: I1205 06:13:07.921283 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24b6ea24-e4d2-42f2-8a10-720d0a3445e4-kube-api-access-4xl7d" (OuterVolumeSpecName: "kube-api-access-4xl7d") pod "24b6ea24-e4d2-42f2-8a10-720d0a3445e4" (UID: "24b6ea24-e4d2-42f2-8a10-720d0a3445e4"). InnerVolumeSpecName "kube-api-access-4xl7d". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.010700 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xl7d\" (UniqueName: \"kubernetes.io/projected/24b6ea24-e4d2-42f2-8a10-720d0a3445e4-kube-api-access-4xl7d\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.010734 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24b6ea24-e4d2-42f2-8a10-720d0a3445e4-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.362134 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d7a764d5-447f-483d-b819-0e398e749600","Type":"ContainerStarted","Data":"a1b30e5b41ae0a67e19767b1176483a9b711ab959c0c1007661ee4670c30e081"}
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.365629 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-dzkx8"
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.367315 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-dzkx8" event={"ID":"24b6ea24-e4d2-42f2-8a10-720d0a3445e4","Type":"ContainerDied","Data":"25ed31014eacbde32efc3f069144578d786727582b02cb77e5b54e68177a5e12"}
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.367348 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25ed31014eacbde32efc3f069144578d786727582b02cb77e5b54e68177a5e12"
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.405329 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.405304219 podStartE2EDuration="3.405304219s" podCreationTimestamp="2025-12-05 06:13:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:13:08.394786689 +0000 UTC m=+1264.306921751" watchObservedRunningTime="2025-12-05 06:13:08.405304219 +0000 UTC m=+1264.317439291"
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.783284 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-e0d5-account-create-update-7vcpx"
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.909148 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-ks5x5"
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.913692 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-36a3-account-create-update-ltscs"
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.923776 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-25s6b"
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.925241 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77998b8e-507c-487f-8616-6fe17b8f9d9a-operator-scripts\") pod \"77998b8e-507c-487f-8616-6fe17b8f9d9a\" (UID: \"77998b8e-507c-487f-8616-6fe17b8f9d9a\") "
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.925423 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zgz\" (UniqueName: \"kubernetes.io/projected/77998b8e-507c-487f-8616-6fe17b8f9d9a-kube-api-access-x7zgz\") pod \"77998b8e-507c-487f-8616-6fe17b8f9d9a\" (UID: \"77998b8e-507c-487f-8616-6fe17b8f9d9a\") "
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.925869 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77998b8e-507c-487f-8616-6fe17b8f9d9a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "77998b8e-507c-487f-8616-6fe17b8f9d9a" (UID: "77998b8e-507c-487f-8616-6fe17b8f9d9a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.935265 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77998b8e-507c-487f-8616-6fe17b8f9d9a-kube-api-access-x7zgz" (OuterVolumeSpecName: "kube-api-access-x7zgz") pod "77998b8e-507c-487f-8616-6fe17b8f9d9a" (UID: "77998b8e-507c-487f-8616-6fe17b8f9d9a"). InnerVolumeSpecName "kube-api-access-x7zgz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:13:08 crc kubenswrapper[4742]: I1205 06:13:08.969769 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2c4b-account-create-update-rcrw9"
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.027396 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59e2f35a-a430-4187-b30a-43a8f1872d9f-operator-scripts\") pod \"59e2f35a-a430-4187-b30a-43a8f1872d9f\" (UID: \"59e2f35a-a430-4187-b30a-43a8f1872d9f\") "
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.027743 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ljlg\" (UniqueName: \"kubernetes.io/projected/4341c972-cfe7-4940-ad91-1f8a4d6138ab-kube-api-access-2ljlg\") pod \"4341c972-cfe7-4940-ad91-1f8a4d6138ab\" (UID: \"4341c972-cfe7-4940-ad91-1f8a4d6138ab\") "
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.027771 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4341c972-cfe7-4940-ad91-1f8a4d6138ab-operator-scripts\") pod \"4341c972-cfe7-4940-ad91-1f8a4d6138ab\" (UID: \"4341c972-cfe7-4940-ad91-1f8a4d6138ab\") "
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.027824 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-967bw\" (UniqueName: \"kubernetes.io/projected/3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f-kube-api-access-967bw\") pod \"3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f\" (UID: \"3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f\") "
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.027872 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tw4p6\" (UniqueName: \"kubernetes.io/projected/59e2f35a-a430-4187-b30a-43a8f1872d9f-kube-api-access-tw4p6\") pod \"59e2f35a-a430-4187-b30a-43a8f1872d9f\" (UID: \"59e2f35a-a430-4187-b30a-43a8f1872d9f\") "
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.027910 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f-operator-scripts\") pod \"3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f\" (UID: \"3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f\") "
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.028067 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59e2f35a-a430-4187-b30a-43a8f1872d9f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "59e2f35a-a430-4187-b30a-43a8f1872d9f" (UID: "59e2f35a-a430-4187-b30a-43a8f1872d9f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.028455 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f" (UID: "3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.028627 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4341c972-cfe7-4940-ad91-1f8a4d6138ab-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4341c972-cfe7-4940-ad91-1f8a4d6138ab" (UID: "4341c972-cfe7-4940-ad91-1f8a4d6138ab"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.028680 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/77998b8e-507c-487f-8616-6fe17b8f9d9a-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.028798 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.028860 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zgz\" (UniqueName: \"kubernetes.io/projected/77998b8e-507c-487f-8616-6fe17b8f9d9a-kube-api-access-x7zgz\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.028934 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59e2f35a-a430-4187-b30a-43a8f1872d9f-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.033020 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4341c972-cfe7-4940-ad91-1f8a4d6138ab-kube-api-access-2ljlg" (OuterVolumeSpecName: "kube-api-access-2ljlg") pod "4341c972-cfe7-4940-ad91-1f8a4d6138ab" (UID: "4341c972-cfe7-4940-ad91-1f8a4d6138ab"). InnerVolumeSpecName "kube-api-access-2ljlg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.033066 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59e2f35a-a430-4187-b30a-43a8f1872d9f-kube-api-access-tw4p6" (OuterVolumeSpecName: "kube-api-access-tw4p6") pod "59e2f35a-a430-4187-b30a-43a8f1872d9f" (UID: "59e2f35a-a430-4187-b30a-43a8f1872d9f"). InnerVolumeSpecName "kube-api-access-tw4p6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.034229 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f-kube-api-access-967bw" (OuterVolumeSpecName: "kube-api-access-967bw") pod "3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f" (UID: "3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f"). InnerVolumeSpecName "kube-api-access-967bw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.129958 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmd4m\" (UniqueName: \"kubernetes.io/projected/fe8a103e-e284-4ec2-b566-32a1180870c6-kube-api-access-cmd4m\") pod \"fe8a103e-e284-4ec2-b566-32a1180870c6\" (UID: \"fe8a103e-e284-4ec2-b566-32a1180870c6\") "
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.130027 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe8a103e-e284-4ec2-b566-32a1180870c6-operator-scripts\") pod \"fe8a103e-e284-4ec2-b566-32a1180870c6\" (UID: \"fe8a103e-e284-4ec2-b566-32a1180870c6\") "
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.130492 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe8a103e-e284-4ec2-b566-32a1180870c6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fe8a103e-e284-4ec2-b566-32a1180870c6" (UID: "fe8a103e-e284-4ec2-b566-32a1180870c6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.130560 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ljlg\" (UniqueName: \"kubernetes.io/projected/4341c972-cfe7-4940-ad91-1f8a4d6138ab-kube-api-access-2ljlg\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.130577 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4341c972-cfe7-4940-ad91-1f8a4d6138ab-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.130602 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-967bw\" (UniqueName: \"kubernetes.io/projected/3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f-kube-api-access-967bw\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.130612 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tw4p6\" (UniqueName: \"kubernetes.io/projected/59e2f35a-a430-4187-b30a-43a8f1872d9f-kube-api-access-tw4p6\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.134435 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe8a103e-e284-4ec2-b566-32a1180870c6-kube-api-access-cmd4m" (OuterVolumeSpecName: "kube-api-access-cmd4m") pod "fe8a103e-e284-4ec2-b566-32a1180870c6" (UID: "fe8a103e-e284-4ec2-b566-32a1180870c6"). InnerVolumeSpecName "kube-api-access-cmd4m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.232219 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmd4m\" (UniqueName: \"kubernetes.io/projected/fe8a103e-e284-4ec2-b566-32a1180870c6-kube-api-access-cmd4m\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.232245 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fe8a103e-e284-4ec2-b566-32a1180870c6-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.379533 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-25s6b" event={"ID":"4341c972-cfe7-4940-ad91-1f8a4d6138ab","Type":"ContainerDied","Data":"b3ebad41f6b679086830d75557ac6adbaed14676432a5883e6b94420b490d762"}
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.379569 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3ebad41f6b679086830d75557ac6adbaed14676432a5883e6b94420b490d762"
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.379618 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-25s6b"
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.389596 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-36a3-account-create-update-ltscs" event={"ID":"3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f","Type":"ContainerDied","Data":"18d757be6719e11c423c93efbcd4e2def089fdb956e72fbefcd345266ce0b416"}
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.389638 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18d757be6719e11c423c93efbcd4e2def089fdb956e72fbefcd345266ce0b416"
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.389787 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-36a3-account-create-update-ltscs"
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.393229 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-e0d5-account-create-update-7vcpx" event={"ID":"77998b8e-507c-487f-8616-6fe17b8f9d9a","Type":"ContainerDied","Data":"540ee4f81853a332969f1d22ee4dbcdabc0d3c8ffa8495655a9d8ec48c4017b3"}
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.393251 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="540ee4f81853a332969f1d22ee4dbcdabc0d3c8ffa8495655a9d8ec48c4017b3"
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.393280 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-e0d5-account-create-update-7vcpx"
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.403359 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-2c4b-account-create-update-rcrw9"
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.403430 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-2c4b-account-create-update-rcrw9" event={"ID":"fe8a103e-e284-4ec2-b566-32a1180870c6","Type":"ContainerDied","Data":"320c522b636cd648bbd34fa2eb215594daadc4760194940ece0dc98cc956fd17"}
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.403507 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="320c522b636cd648bbd34fa2eb215594daadc4760194940ece0dc98cc956fd17"
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.408465 4742 generic.go:334] "Generic (PLEG): container finished" podID="cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" containerID="6f7180aed7ceb51843eaabd0a032e872d10e578593ffbf206208c7908ccaad69" exitCode=0
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.408557 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13","Type":"ContainerDied","Data":"6f7180aed7ceb51843eaabd0a032e872d10e578593ffbf206208c7908ccaad69"}
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.415004 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-ks5x5" event={"ID":"59e2f35a-a430-4187-b30a-43a8f1872d9f","Type":"ContainerDied","Data":"38eacd5fed1b03d978b69dd78bb9c855e249e02dc0ff73c07d046ac95ad82459"}
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.415118 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-ks5x5"
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.415139 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38eacd5fed1b03d978b69dd78bb9c855e249e02dc0ff73c07d046ac95ad82459"
Dec 05 06:13:09 crc kubenswrapper[4742]: I1205 06:13:09.880222 4742 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.048531 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thkmw\" (UniqueName: \"kubernetes.io/projected/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-kube-api-access-thkmw\") pod \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.048577 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.048621 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-httpd-run\") pod \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.048643 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-logs\") pod \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.048699 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-internal-tls-certs\") pod \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.048792 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-config-data\") pod \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.048811 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-scripts\") pod \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.048831 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-combined-ca-bundle\") pod \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\" (UID: \"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13\") " Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.049348 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-logs" (OuterVolumeSpecName: "logs") pod "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" (UID: "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.049367 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" (UID: "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.053286 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-kube-api-access-thkmw" (OuterVolumeSpecName: "kube-api-access-thkmw") pod "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" (UID: "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13"). InnerVolumeSpecName "kube-api-access-thkmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.055163 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-scripts" (OuterVolumeSpecName: "scripts") pod "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" (UID: "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.055161 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" (UID: "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.118162 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" (UID: "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.152130 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thkmw\" (UniqueName: \"kubernetes.io/projected/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-kube-api-access-thkmw\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.152167 4742 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.152178 4742 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.152186 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.152193 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.152201 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.176298 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" (UID: "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.184696 4742 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.202212 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-config-data" (OuterVolumeSpecName: "config-data") pod "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" (UID: "cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.254465 4742 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.254496 4742 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.254505 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297190 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-t2p4w"] Dec 05 06:13:10 crc kubenswrapper[4742]: E1205 06:13:10.297572 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77998b8e-507c-487f-8616-6fe17b8f9d9a" containerName="mariadb-account-create-update" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297589 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="77998b8e-507c-487f-8616-6fe17b8f9d9a" containerName="mariadb-account-create-update" Dec 05 06:13:10 crc kubenswrapper[4742]: E1205 06:13:10.297600 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24b6ea24-e4d2-42f2-8a10-720d0a3445e4" containerName="mariadb-database-create" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297628 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="24b6ea24-e4d2-42f2-8a10-720d0a3445e4" containerName="mariadb-database-create" Dec 05 06:13:10 crc kubenswrapper[4742]: E1205 06:13:10.297640 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59e2f35a-a430-4187-b30a-43a8f1872d9f" containerName="mariadb-database-create" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297646 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="59e2f35a-a430-4187-b30a-43a8f1872d9f" containerName="mariadb-database-create" Dec 05 06:13:10 crc kubenswrapper[4742]: E1205 06:13:10.297668 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4341c972-cfe7-4940-ad91-1f8a4d6138ab" containerName="mariadb-database-create" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297674 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="4341c972-cfe7-4940-ad91-1f8a4d6138ab" containerName="mariadb-database-create" Dec 05 06:13:10 crc kubenswrapper[4742]: E1205 06:13:10.297683 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" containerName="glance-log" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297688 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" containerName="glance-log" Dec 05 06:13:10 crc kubenswrapper[4742]: E1205 06:13:10.297696 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f" containerName="mariadb-account-create-update" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297702 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f" containerName="mariadb-account-create-update" Dec 05 06:13:10 crc kubenswrapper[4742]: E1205 06:13:10.297714 4742 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" containerName="glance-httpd" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297721 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" containerName="glance-httpd" Dec 05 06:13:10 crc kubenswrapper[4742]: E1205 06:13:10.297730 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe8a103e-e284-4ec2-b566-32a1180870c6" containerName="mariadb-account-create-update" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297736 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe8a103e-e284-4ec2-b566-32a1180870c6" containerName="mariadb-account-create-update" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297924 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="59e2f35a-a430-4187-b30a-43a8f1872d9f" containerName="mariadb-database-create" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297938 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f" containerName="mariadb-account-create-update" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297948 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="77998b8e-507c-487f-8616-6fe17b8f9d9a" containerName="mariadb-account-create-update" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297959 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" containerName="glance-httpd" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297976 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe8a103e-e284-4ec2-b566-32a1180870c6" containerName="mariadb-account-create-update" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297988 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="24b6ea24-e4d2-42f2-8a10-720d0a3445e4" containerName="mariadb-database-create" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.297999 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" containerName="glance-log" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.298009 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="4341c972-cfe7-4940-ad91-1f8a4d6138ab" containerName="mariadb-database-create" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.298925 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.303483 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-6q6t9" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.303708 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.303902 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.312428 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-t2p4w"] Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.430796 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13","Type":"ContainerDied","Data":"aad08aa9c8a8bd46d5df4b31fe04a16fd58bf8242346cf57129141bfc379535e"} Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.430866 4742 scope.go:117] "RemoveContainer" containerID="6f7180aed7ceb51843eaabd0a032e872d10e578593ffbf206208c7908ccaad69" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.430873 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.457481 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-scripts\") pod \"nova-cell0-conductor-db-sync-t2p4w\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.457581 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-config-data\") pod \"nova-cell0-conductor-db-sync-t2p4w\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.457736 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4f2rn\" (UniqueName: \"kubernetes.io/projected/a53c417d-f914-40f7-a7dd-47cafb2b6718-kube-api-access-4f2rn\") pod \"nova-cell0-conductor-db-sync-t2p4w\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.457974 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-t2p4w\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.473011 4742 scope.go:117] "RemoveContainer" containerID="18773fd8cfb54fd920520afc01e72e5a3ec52859012030bd87061488e8603a77" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.476652 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.495687 4742 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.509543 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.512303 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.514086 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.514623 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.516045 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.559363 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-t2p4w\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.559418 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-scripts\") pod \"nova-cell0-conductor-db-sync-t2p4w\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.559485 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-config-data\") pod \"nova-cell0-conductor-db-sync-t2p4w\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.559509 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4f2rn\" (UniqueName: \"kubernetes.io/projected/a53c417d-f914-40f7-a7dd-47cafb2b6718-kube-api-access-4f2rn\") pod \"nova-cell0-conductor-db-sync-t2p4w\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.563806 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-t2p4w\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.563993 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-config-data\") pod \"nova-cell0-conductor-db-sync-t2p4w\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.564888 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-scripts\") pod \"nova-cell0-conductor-db-sync-t2p4w\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.575810 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4f2rn\" (UniqueName: \"kubernetes.io/projected/a53c417d-f914-40f7-a7dd-47cafb2b6718-kube-api-access-4f2rn\") pod \"nova-cell0-conductor-db-sync-t2p4w\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.615927 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.662986 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3428207-2cb4-47d8-b4d8-941c3a4928fb-logs\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.663047 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.663095 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.663114 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e3428207-2cb4-47d8-b4d8-941c3a4928fb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.663135 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.663158 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.663176 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: 
\"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.663242 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wlzj\" (UniqueName: \"kubernetes.io/projected/e3428207-2cb4-47d8-b4d8-941c3a4928fb-kube-api-access-4wlzj\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.767178 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3428207-2cb4-47d8-b4d8-941c3a4928fb-logs\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.767254 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.767300 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.767324 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e3428207-2cb4-47d8-b4d8-941c3a4928fb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.767355 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.767385 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.767410 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.767494 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wlzj\" (UniqueName: \"kubernetes.io/projected/e3428207-2cb4-47d8-b4d8-941c3a4928fb-kube-api-access-4wlzj\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " 
pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.768035 4742 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.768182 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e3428207-2cb4-47d8-b4d8-941c3a4928fb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.768440 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3428207-2cb4-47d8-b4d8-941c3a4928fb-logs\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.774130 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.775480 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.775972 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.778591 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.792617 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wlzj\" (UniqueName: \"kubernetes.io/projected/e3428207-2cb4-47d8-b4d8-941c3a4928fb-kube-api-access-4wlzj\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.802652 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.873328 4742 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 06:13:10 crc kubenswrapper[4742]: I1205 06:13:10.952498 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.076118 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-config-data\") pod \"7cc340c5-1842-4b38-af61-4cc7f24b6118\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.076239 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5czxs\" (UniqueName: \"kubernetes.io/projected/7cc340c5-1842-4b38-af61-4cc7f24b6118-kube-api-access-5czxs\") pod \"7cc340c5-1842-4b38-af61-4cc7f24b6118\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.076292 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-combined-ca-bundle\") pod \"7cc340c5-1842-4b38-af61-4cc7f24b6118\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.076341 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-sg-core-conf-yaml\") pod \"7cc340c5-1842-4b38-af61-4cc7f24b6118\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.076512 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-scripts\") pod \"7cc340c5-1842-4b38-af61-4cc7f24b6118\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.076544 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7cc340c5-1842-4b38-af61-4cc7f24b6118-log-httpd\") pod \"7cc340c5-1842-4b38-af61-4cc7f24b6118\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.076598 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7cc340c5-1842-4b38-af61-4cc7f24b6118-run-httpd\") pod \"7cc340c5-1842-4b38-af61-4cc7f24b6118\" (UID: \"7cc340c5-1842-4b38-af61-4cc7f24b6118\") " Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.077578 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cc340c5-1842-4b38-af61-4cc7f24b6118-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7cc340c5-1842-4b38-af61-4cc7f24b6118" (UID: "7cc340c5-1842-4b38-af61-4cc7f24b6118"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.077648 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cc340c5-1842-4b38-af61-4cc7f24b6118-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7cc340c5-1842-4b38-af61-4cc7f24b6118" (UID: "7cc340c5-1842-4b38-af61-4cc7f24b6118"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.081362 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-scripts" (OuterVolumeSpecName: "scripts") pod "7cc340c5-1842-4b38-af61-4cc7f24b6118" (UID: "7cc340c5-1842-4b38-af61-4cc7f24b6118"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.083265 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cc340c5-1842-4b38-af61-4cc7f24b6118-kube-api-access-5czxs" (OuterVolumeSpecName: "kube-api-access-5czxs") pod "7cc340c5-1842-4b38-af61-4cc7f24b6118" (UID: "7cc340c5-1842-4b38-af61-4cc7f24b6118"). InnerVolumeSpecName "kube-api-access-5czxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.121743 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7cc340c5-1842-4b38-af61-4cc7f24b6118" (UID: "7cc340c5-1842-4b38-af61-4cc7f24b6118"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.165198 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7cc340c5-1842-4b38-af61-4cc7f24b6118" (UID: "7cc340c5-1842-4b38-af61-4cc7f24b6118"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.173161 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-t2p4w"] Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.178276 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.178300 4742 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7cc340c5-1842-4b38-af61-4cc7f24b6118-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.178309 4742 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7cc340c5-1842-4b38-af61-4cc7f24b6118-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.178317 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5czxs\" (UniqueName: \"kubernetes.io/projected/7cc340c5-1842-4b38-af61-4cc7f24b6118-kube-api-access-5czxs\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.178326 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.178336 4742 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.223614 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-config-data" (OuterVolumeSpecName: "config-data") pod "7cc340c5-1842-4b38-af61-4cc7f24b6118" (UID: "7cc340c5-1842-4b38-af61-4cc7f24b6118"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.279596 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cc340c5-1842-4b38-af61-4cc7f24b6118-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.417725 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:13:11 crc kubenswrapper[4742]: W1205 06:13:11.418444 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3428207_2cb4_47d8_b4d8_941c3a4928fb.slice/crio-89916a0d2970c36517e36127fd05ef598ea49b28c0fc274bbda2e3eb1c99b5cc WatchSource:0}: Error finding container 89916a0d2970c36517e36127fd05ef598ea49b28c0fc274bbda2e3eb1c99b5cc: Status 404 returned error can't find the container with id 89916a0d2970c36517e36127fd05ef598ea49b28c0fc274bbda2e3eb1c99b5cc Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.440203 4742 generic.go:334] "Generic (PLEG): container finished" podID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerID="c80302b78ebba3e57bc4582784ddd64945c60da2c78c331f822f74376b24cf37" exitCode=0 Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.440268 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7cc340c5-1842-4b38-af61-4cc7f24b6118","Type":"ContainerDied","Data":"c80302b78ebba3e57bc4582784ddd64945c60da2c78c331f822f74376b24cf37"} Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.440301 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7cc340c5-1842-4b38-af61-4cc7f24b6118","Type":"ContainerDied","Data":"966fe8f06e102d26d475b47e5469cfb31263364443460cbabf3b0f270a2dac94"} Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.440319 4742 scope.go:117] "RemoveContainer" containerID="79c48523795f949a3e88f948381417c01fc32793400c7098935ffcf6596380d5" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.440462 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.446381 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e3428207-2cb4-47d8-b4d8-941c3a4928fb","Type":"ContainerStarted","Data":"89916a0d2970c36517e36127fd05ef598ea49b28c0fc274bbda2e3eb1c99b5cc"} Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.447955 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-t2p4w" event={"ID":"a53c417d-f914-40f7-a7dd-47cafb2b6718","Type":"ContainerStarted","Data":"cc5a37d2cc27e77b417c0529170d4d005e83c9b90560a3c8a8c575bee237f850"} Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.466497 4742 scope.go:117] "RemoveContainer" containerID="4bb2baaec185e8417c830da7f44f14ec9002a157e466150af2923ab43a1992e3" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.491067 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.514797 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.544262 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:13:11 crc kubenswrapper[4742]: E1205 06:13:11.544835 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="ceilometer-notification-agent" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.544864 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="ceilometer-notification-agent" Dec 05 06:13:11 crc kubenswrapper[4742]: E1205 06:13:11.544886 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="sg-core" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.544897 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="sg-core" Dec 05 06:13:11 crc kubenswrapper[4742]: E1205 06:13:11.544917 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="ceilometer-central-agent" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.544926 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="ceilometer-central-agent" Dec 05 06:13:11 crc kubenswrapper[4742]: E1205 06:13:11.544961 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="proxy-httpd" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.544969 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="proxy-httpd" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.545295 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="sg-core" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.545328 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="ceilometer-central-agent" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.545347 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="proxy-httpd" Dec 05 06:13:11 crc 
kubenswrapper[4742]: I1205 06:13:11.545364 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" containerName="ceilometer-notification-agent" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.546067 4742 scope.go:117] "RemoveContainer" containerID="a3a7c965f36083dda1ad293969a949eb60bda045103f9b40ecc66e28cd7143d3" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.547626 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.549993 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.551342 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.552701 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.582203 4742 scope.go:117] "RemoveContainer" containerID="c80302b78ebba3e57bc4582784ddd64945c60da2c78c331f822f74376b24cf37" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.609089 4742 scope.go:117] "RemoveContainer" containerID="79c48523795f949a3e88f948381417c01fc32793400c7098935ffcf6596380d5" Dec 05 06:13:11 crc kubenswrapper[4742]: E1205 06:13:11.609450 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79c48523795f949a3e88f948381417c01fc32793400c7098935ffcf6596380d5\": container with ID starting with 79c48523795f949a3e88f948381417c01fc32793400c7098935ffcf6596380d5 not found: ID does not exist" containerID="79c48523795f949a3e88f948381417c01fc32793400c7098935ffcf6596380d5" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.609489 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79c48523795f949a3e88f948381417c01fc32793400c7098935ffcf6596380d5"} err="failed to get container status \"79c48523795f949a3e88f948381417c01fc32793400c7098935ffcf6596380d5\": rpc error: code = NotFound desc = could not find container \"79c48523795f949a3e88f948381417c01fc32793400c7098935ffcf6596380d5\": container with ID starting with 79c48523795f949a3e88f948381417c01fc32793400c7098935ffcf6596380d5 not found: ID does not exist" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.609514 4742 scope.go:117] "RemoveContainer" containerID="4bb2baaec185e8417c830da7f44f14ec9002a157e466150af2923ab43a1992e3" Dec 05 06:13:11 crc kubenswrapper[4742]: E1205 06:13:11.609866 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bb2baaec185e8417c830da7f44f14ec9002a157e466150af2923ab43a1992e3\": container with ID starting with 4bb2baaec185e8417c830da7f44f14ec9002a157e466150af2923ab43a1992e3 not found: ID does not exist" containerID="4bb2baaec185e8417c830da7f44f14ec9002a157e466150af2923ab43a1992e3" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.609962 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bb2baaec185e8417c830da7f44f14ec9002a157e466150af2923ab43a1992e3"} err="failed to get container status \"4bb2baaec185e8417c830da7f44f14ec9002a157e466150af2923ab43a1992e3\": rpc error: code = NotFound desc = could not find container \"4bb2baaec185e8417c830da7f44f14ec9002a157e466150af2923ab43a1992e3\": 
container with ID starting with 4bb2baaec185e8417c830da7f44f14ec9002a157e466150af2923ab43a1992e3 not found: ID does not exist" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.610050 4742 scope.go:117] "RemoveContainer" containerID="a3a7c965f36083dda1ad293969a949eb60bda045103f9b40ecc66e28cd7143d3" Dec 05 06:13:11 crc kubenswrapper[4742]: E1205 06:13:11.610967 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3a7c965f36083dda1ad293969a949eb60bda045103f9b40ecc66e28cd7143d3\": container with ID starting with a3a7c965f36083dda1ad293969a949eb60bda045103f9b40ecc66e28cd7143d3 not found: ID does not exist" containerID="a3a7c965f36083dda1ad293969a949eb60bda045103f9b40ecc66e28cd7143d3" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.610992 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3a7c965f36083dda1ad293969a949eb60bda045103f9b40ecc66e28cd7143d3"} err="failed to get container status \"a3a7c965f36083dda1ad293969a949eb60bda045103f9b40ecc66e28cd7143d3\": rpc error: code = NotFound desc = could not find container \"a3a7c965f36083dda1ad293969a949eb60bda045103f9b40ecc66e28cd7143d3\": container with ID starting with a3a7c965f36083dda1ad293969a949eb60bda045103f9b40ecc66e28cd7143d3 not found: ID does not exist" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.611032 4742 scope.go:117] "RemoveContainer" containerID="c80302b78ebba3e57bc4582784ddd64945c60da2c78c331f822f74376b24cf37" Dec 05 06:13:11 crc kubenswrapper[4742]: E1205 06:13:11.611483 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c80302b78ebba3e57bc4582784ddd64945c60da2c78c331f822f74376b24cf37\": container with ID starting with c80302b78ebba3e57bc4582784ddd64945c60da2c78c331f822f74376b24cf37 not found: ID does not exist" containerID="c80302b78ebba3e57bc4582784ddd64945c60da2c78c331f822f74376b24cf37" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.611503 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c80302b78ebba3e57bc4582784ddd64945c60da2c78c331f822f74376b24cf37"} err="failed to get container status \"c80302b78ebba3e57bc4582784ddd64945c60da2c78c331f822f74376b24cf37\": rpc error: code = NotFound desc = could not find container \"c80302b78ebba3e57bc4582784ddd64945c60da2c78c331f822f74376b24cf37\": container with ID starting with c80302b78ebba3e57bc4582784ddd64945c60da2c78c331f822f74376b24cf37 not found: ID does not exist" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.685664 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-scripts\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.685707 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5144636d-7c40-4885-88c8-9cd38ffb0aec-log-httpd\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.685730 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-config-data\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.685815 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsv69\" (UniqueName: \"kubernetes.io/projected/5144636d-7c40-4885-88c8-9cd38ffb0aec-kube-api-access-gsv69\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.685836 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.685855 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5144636d-7c40-4885-88c8-9cd38ffb0aec-run-httpd\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.685904 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.787909 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-scripts\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.787955 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5144636d-7c40-4885-88c8-9cd38ffb0aec-log-httpd\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.787977 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-config-data\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.788079 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsv69\" (UniqueName: \"kubernetes.io/projected/5144636d-7c40-4885-88c8-9cd38ffb0aec-kube-api-access-gsv69\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.788101 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.788119 4742 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5144636d-7c40-4885-88c8-9cd38ffb0aec-run-httpd\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.788218 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.789018 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5144636d-7c40-4885-88c8-9cd38ffb0aec-log-httpd\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.789018 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5144636d-7c40-4885-88c8-9cd38ffb0aec-run-httpd\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.793150 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.793590 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.794237 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-scripts\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.794541 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-config-data\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.804875 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsv69\" (UniqueName: \"kubernetes.io/projected/5144636d-7c40-4885-88c8-9cd38ffb0aec-kube-api-access-gsv69\") pod \"ceilometer-0\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") " pod="openstack/ceilometer-0" Dec 05 06:13:11 crc kubenswrapper[4742]: I1205 06:13:11.879049 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:13:12 crc kubenswrapper[4742]: I1205 06:13:12.270760 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:13:12 crc kubenswrapper[4742]: I1205 06:13:12.313742 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:13:12 crc kubenswrapper[4742]: I1205 06:13:12.396140 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cc340c5-1842-4b38-af61-4cc7f24b6118" path="/var/lib/kubelet/pods/7cc340c5-1842-4b38-af61-4cc7f24b6118/volumes" Dec 05 06:13:12 crc kubenswrapper[4742]: I1205 06:13:12.397660 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13" path="/var/lib/kubelet/pods/cdf66b2c-d07c-4ab7-a4b6-d4b1187ead13/volumes" Dec 05 06:13:12 crc kubenswrapper[4742]: I1205 06:13:12.461031 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5144636d-7c40-4885-88c8-9cd38ffb0aec","Type":"ContainerStarted","Data":"620317d2c284e80093ebb67a3e05478e441c0da089f5ab91a69c1cf4e3992828"} Dec 05 06:13:12 crc kubenswrapper[4742]: I1205 06:13:12.462981 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e3428207-2cb4-47d8-b4d8-941c3a4928fb","Type":"ContainerStarted","Data":"c578f580ac4c94f28399a0f7e39da62ca4fb8496169c7001d2053863082caf1f"} Dec 05 06:13:13 crc kubenswrapper[4742]: I1205 06:13:13.479667 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e3428207-2cb4-47d8-b4d8-941c3a4928fb","Type":"ContainerStarted","Data":"b8f02737722d7ebc14c897ea39f901ead1646c8d5e8658a44265bfe41044eed8"} Dec 05 06:13:13 crc kubenswrapper[4742]: I1205 06:13:13.484256 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5144636d-7c40-4885-88c8-9cd38ffb0aec","Type":"ContainerStarted","Data":"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9"} Dec 05 06:13:13 crc kubenswrapper[4742]: I1205 06:13:13.503602 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.503580893 podStartE2EDuration="3.503580893s" podCreationTimestamp="2025-12-05 06:13:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:13:13.501012965 +0000 UTC m=+1269.413148027" watchObservedRunningTime="2025-12-05 06:13:13.503580893 +0000 UTC m=+1269.415715955" Dec 05 06:13:14 crc kubenswrapper[4742]: I1205 06:13:14.499106 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5144636d-7c40-4885-88c8-9cd38ffb0aec","Type":"ContainerStarted","Data":"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000"} Dec 05 06:13:15 crc kubenswrapper[4742]: I1205 06:13:15.828530 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 06:13:15 crc kubenswrapper[4742]: I1205 06:13:15.828853 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 06:13:15 crc kubenswrapper[4742]: I1205 06:13:15.872081 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 06:13:15 crc kubenswrapper[4742]: I1205 
06:13:15.877082 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 06:13:16 crc kubenswrapper[4742]: I1205 06:13:16.521504 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 06:13:16 crc kubenswrapper[4742]: I1205 06:13:16.521792 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 06:13:18 crc kubenswrapper[4742]: I1205 06:13:18.328487 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 06:13:18 crc kubenswrapper[4742]: I1205 06:13:18.339837 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 06:13:19 crc kubenswrapper[4742]: I1205 06:13:19.563089 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-t2p4w" event={"ID":"a53c417d-f914-40f7-a7dd-47cafb2b6718","Type":"ContainerStarted","Data":"e6ae9e5aa6d032b3ba8965356ae8b4ff4257705bc6f7ae6526108bfef473a057"} Dec 05 06:13:19 crc kubenswrapper[4742]: I1205 06:13:19.568854 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5144636d-7c40-4885-88c8-9cd38ffb0aec","Type":"ContainerStarted","Data":"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8"} Dec 05 06:13:19 crc kubenswrapper[4742]: I1205 06:13:19.583612 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-t2p4w" podStartSLOduration=2.331023852 podStartE2EDuration="9.583593497s" podCreationTimestamp="2025-12-05 06:13:10 +0000 UTC" firstStartedPulling="2025-12-05 06:13:11.181622086 +0000 UTC m=+1267.093757148" lastFinishedPulling="2025-12-05 06:13:18.434191731 +0000 UTC m=+1274.346326793" observedRunningTime="2025-12-05 06:13:19.580035892 +0000 UTC m=+1275.492170984" watchObservedRunningTime="2025-12-05 06:13:19.583593497 +0000 UTC m=+1275.495728559" Dec 05 06:13:20 crc kubenswrapper[4742]: I1205 06:13:20.602687 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5144636d-7c40-4885-88c8-9cd38ffb0aec","Type":"ContainerStarted","Data":"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42"} Dec 05 06:13:20 crc kubenswrapper[4742]: I1205 06:13:20.602964 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="ceilometer-central-agent" containerID="cri-o://c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9" gracePeriod=30 Dec 05 06:13:20 crc kubenswrapper[4742]: I1205 06:13:20.603131 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="proxy-httpd" containerID="cri-o://1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42" gracePeriod=30 Dec 05 06:13:20 crc kubenswrapper[4742]: I1205 06:13:20.603177 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="ceilometer-notification-agent" containerID="cri-o://9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000" gracePeriod=30 Dec 05 06:13:20 crc kubenswrapper[4742]: I1205 06:13:20.603236 4742 
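The two durations in the "Observed pod startup duration" entries above are related by simple arithmetic: podStartE2EDuration is observed-running minus creation, and podStartSLOduration appears to additionally subtract the image-pull window (firstStartedPulling to lastFinishedPulling). A quick check with the nova-cell0-conductor-db-sync-t2p4w timestamps copied from the log (a sketch, not kubelet code) reproduces both logged values:

package main

import (
	"fmt"
	"time"
)

func mustParse(s string) time.Time {
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2025-12-05 06:13:10 +0000 UTC")
	firstPull := mustParse("2025-12-05 06:13:11.181622086 +0000 UTC")
	lastPull := mustParse("2025-12-05 06:13:18.434191731 +0000 UTC")
	running := mustParse("2025-12-05 06:13:19.583593497 +0000 UTC")

	e2e := running.Sub(created)
	slo := e2e - lastPull.Sub(firstPull) // E2E minus time spent pulling images

	fmt.Println(e2e) // 9.583593497s, the logged podStartE2EDuration
	fmt.Println(slo) // 2.331023852s, the logged podStartSLOduration
}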
Dec 05 06:13:20 crc kubenswrapper[4742]: I1205 06:13:20.649165 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.81806652 podStartE2EDuration="9.64915022s" podCreationTimestamp="2025-12-05 06:13:11 +0000 UTC" firstStartedPulling="2025-12-05 06:13:12.327828566 +0000 UTC m=+1268.239963628" lastFinishedPulling="2025-12-05 06:13:20.158912246 +0000 UTC m=+1276.071047328" observedRunningTime="2025-12-05 06:13:20.647739672 +0000 UTC m=+1276.559874744" watchObservedRunningTime="2025-12-05 06:13:20.64915022 +0000 UTC m=+1276.561285282"
Dec 05 06:13:20 crc kubenswrapper[4742]: I1205 06:13:20.874440 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 05 06:13:20 crc kubenswrapper[4742]: I1205 06:13:20.874508 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 05 06:13:20 crc kubenswrapper[4742]: I1205 06:13:20.913092 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 05 06:13:20 crc kubenswrapper[4742]: I1205 06:13:20.926714 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.307689 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.411670 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-config-data\") pod \"5144636d-7c40-4885-88c8-9cd38ffb0aec\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") "
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.411802 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5144636d-7c40-4885-88c8-9cd38ffb0aec-run-httpd\") pod \"5144636d-7c40-4885-88c8-9cd38ffb0aec\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") "
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.411827 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-scripts\") pod \"5144636d-7c40-4885-88c8-9cd38ffb0aec\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") "
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.411857 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5144636d-7c40-4885-88c8-9cd38ffb0aec-log-httpd\") pod \"5144636d-7c40-4885-88c8-9cd38ffb0aec\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") "
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.411906 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-sg-core-conf-yaml\") pod \"5144636d-7c40-4885-88c8-9cd38ffb0aec\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") "
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.411929 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-combined-ca-bundle\") pod \"5144636d-7c40-4885-88c8-9cd38ffb0aec\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") "
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.411959 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsv69\" (UniqueName: \"kubernetes.io/projected/5144636d-7c40-4885-88c8-9cd38ffb0aec-kube-api-access-gsv69\") pod \"5144636d-7c40-4885-88c8-9cd38ffb0aec\" (UID: \"5144636d-7c40-4885-88c8-9cd38ffb0aec\") "
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.412450 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5144636d-7c40-4885-88c8-9cd38ffb0aec-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5144636d-7c40-4885-88c8-9cd38ffb0aec" (UID: "5144636d-7c40-4885-88c8-9cd38ffb0aec"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.413083 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5144636d-7c40-4885-88c8-9cd38ffb0aec-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5144636d-7c40-4885-88c8-9cd38ffb0aec" (UID: "5144636d-7c40-4885-88c8-9cd38ffb0aec"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.418348 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-scripts" (OuterVolumeSpecName: "scripts") pod "5144636d-7c40-4885-88c8-9cd38ffb0aec" (UID: "5144636d-7c40-4885-88c8-9cd38ffb0aec"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.426328 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5144636d-7c40-4885-88c8-9cd38ffb0aec-kube-api-access-gsv69" (OuterVolumeSpecName: "kube-api-access-gsv69") pod "5144636d-7c40-4885-88c8-9cd38ffb0aec" (UID: "5144636d-7c40-4885-88c8-9cd38ffb0aec"). InnerVolumeSpecName "kube-api-access-gsv69". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.444531 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5144636d-7c40-4885-88c8-9cd38ffb0aec" (UID: "5144636d-7c40-4885-88c8-9cd38ffb0aec"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.517925 4742 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5144636d-7c40-4885-88c8-9cd38ffb0aec-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.517971 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.517992 4742 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5144636d-7c40-4885-88c8-9cd38ffb0aec-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.518010 4742 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.518096 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsv69\" (UniqueName: \"kubernetes.io/projected/5144636d-7c40-4885-88c8-9cd38ffb0aec-kube-api-access-gsv69\") on node \"crc\" DevicePath \"\""
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.520994 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5144636d-7c40-4885-88c8-9cd38ffb0aec" (UID: "5144636d-7c40-4885-88c8-9cd38ffb0aec"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.532693 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-config-data" (OuterVolumeSpecName: "config-data") pod "5144636d-7c40-4885-88c8-9cd38ffb0aec" (UID: "5144636d-7c40-4885-88c8-9cd38ffb0aec"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.618343 4742 generic.go:334] "Generic (PLEG): container finished" podID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerID="1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42" exitCode=0 Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.618736 4742 generic.go:334] "Generic (PLEG): container finished" podID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerID="f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8" exitCode=2 Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.618458 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5144636d-7c40-4885-88c8-9cd38ffb0aec","Type":"ContainerDied","Data":"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42"} Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.618811 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5144636d-7c40-4885-88c8-9cd38ffb0aec","Type":"ContainerDied","Data":"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8"} Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.618847 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5144636d-7c40-4885-88c8-9cd38ffb0aec","Type":"ContainerDied","Data":"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000"} Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.618469 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.618889 4742 scope.go:117] "RemoveContainer" containerID="1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.618762 4742 generic.go:334] "Generic (PLEG): container finished" podID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerID="9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000" exitCode=0 Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.618978 4742 generic.go:334] "Generic (PLEG): container finished" podID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerID="c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9" exitCode=0 Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.619018 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5144636d-7c40-4885-88c8-9cd38ffb0aec","Type":"ContainerDied","Data":"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9"} Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.619049 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5144636d-7c40-4885-88c8-9cd38ffb0aec","Type":"ContainerDied","Data":"620317d2c284e80093ebb67a3e05478e441c0da089f5ab91a69c1cf4e3992828"} Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.620295 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.620367 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.623026 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:21 
crc kubenswrapper[4742]: I1205 06:13:21.623081 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5144636d-7c40-4885-88c8-9cd38ffb0aec-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.657970 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.660752 4742 scope.go:117] "RemoveContainer" containerID="f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.674086 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.685325 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:13:21 crc kubenswrapper[4742]: E1205 06:13:21.685697 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="ceilometer-notification-agent" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.685714 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="ceilometer-notification-agent" Dec 05 06:13:21 crc kubenswrapper[4742]: E1205 06:13:21.685727 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="proxy-httpd" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.685734 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="proxy-httpd" Dec 05 06:13:21 crc kubenswrapper[4742]: E1205 06:13:21.685749 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="ceilometer-central-agent" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.685756 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="ceilometer-central-agent" Dec 05 06:13:21 crc kubenswrapper[4742]: E1205 06:13:21.685767 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="sg-core" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.685773 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="sg-core" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.685951 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="proxy-httpd" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.685970 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="ceilometer-central-agent" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.685984 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="sg-core" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.685996 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" containerName="ceilometer-notification-agent" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.688196 4742 util.go:30] "No sandbox for pod can be found. 
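The cpu_manager/memory_manager "RemoveStaleState" entries above follow a common pattern: resource-manager state is keyed by (podUID, containerName), and entries whose pod is no longer active are dropped when a new pod is admitted. An illustrative sketch of that pattern (not the kubelet's implementation; the key type and function are hypothetical):

package statecleanup

type key struct {
	PodUID    string
	Container string
}

// RemoveStaleState deletes assignments belonging to pods that are no longer
// in the active set and returns the removed keys, which is useful for
// logging in the style of "Deleted CPUSet assignment" above.
func RemoveStaleState(assignments map[key]struct{}, activePods map[string]bool) []key {
	var removed []key
	for k := range assignments {
		if !activePods[k.PodUID] {
			delete(assignments, k) // deleting during range is safe in Go
			removed = append(removed, k)
		}
	}
	return removed
}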
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.690801 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.691258 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.695572 4742 scope.go:117] "RemoveContainer" containerID="9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.703168 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.727711 4742 scope.go:117] "RemoveContainer" containerID="c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.747574 4742 scope.go:117] "RemoveContainer" containerID="1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42" Dec 05 06:13:21 crc kubenswrapper[4742]: E1205 06:13:21.748036 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42\": container with ID starting with 1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42 not found: ID does not exist" containerID="1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.748087 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42"} err="failed to get container status \"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42\": rpc error: code = NotFound desc = could not find container \"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42\": container with ID starting with 1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.748113 4742 scope.go:117] "RemoveContainer" containerID="f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8" Dec 05 06:13:21 crc kubenswrapper[4742]: E1205 06:13:21.748536 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8\": container with ID starting with f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8 not found: ID does not exist" containerID="f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.748646 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8"} err="failed to get container status \"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8\": rpc error: code = NotFound desc = could not find container \"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8\": container with ID starting with f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.748685 4742 scope.go:117] "RemoveContainer" containerID="9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000" Dec 05 
06:13:21 crc kubenswrapper[4742]: E1205 06:13:21.749042 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000\": container with ID starting with 9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000 not found: ID does not exist" containerID="9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.749085 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000"} err="failed to get container status \"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000\": rpc error: code = NotFound desc = could not find container \"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000\": container with ID starting with 9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.749103 4742 scope.go:117] "RemoveContainer" containerID="c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9" Dec 05 06:13:21 crc kubenswrapper[4742]: E1205 06:13:21.749308 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9\": container with ID starting with c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9 not found: ID does not exist" containerID="c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.749332 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9"} err="failed to get container status \"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9\": rpc error: code = NotFound desc = could not find container \"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9\": container with ID starting with c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.749344 4742 scope.go:117] "RemoveContainer" containerID="1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.749531 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42"} err="failed to get container status \"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42\": rpc error: code = NotFound desc = could not find container \"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42\": container with ID starting with 1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.749558 4742 scope.go:117] "RemoveContainer" containerID="f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.749791 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8"} err="failed to get container status 
\"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8\": rpc error: code = NotFound desc = could not find container \"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8\": container with ID starting with f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.749816 4742 scope.go:117] "RemoveContainer" containerID="9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.750512 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000"} err="failed to get container status \"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000\": rpc error: code = NotFound desc = could not find container \"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000\": container with ID starting with 9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.750536 4742 scope.go:117] "RemoveContainer" containerID="c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.750886 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9"} err="failed to get container status \"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9\": rpc error: code = NotFound desc = could not find container \"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9\": container with ID starting with c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.750913 4742 scope.go:117] "RemoveContainer" containerID="1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.752182 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42"} err="failed to get container status \"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42\": rpc error: code = NotFound desc = could not find container \"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42\": container with ID starting with 1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.752220 4742 scope.go:117] "RemoveContainer" containerID="f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.752677 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8"} err="failed to get container status \"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8\": rpc error: code = NotFound desc = could not find container \"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8\": container with ID starting with f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.752707 4742 scope.go:117] "RemoveContainer" 
containerID="9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.753305 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000"} err="failed to get container status \"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000\": rpc error: code = NotFound desc = could not find container \"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000\": container with ID starting with 9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.753373 4742 scope.go:117] "RemoveContainer" containerID="c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.753902 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9"} err="failed to get container status \"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9\": rpc error: code = NotFound desc = could not find container \"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9\": container with ID starting with c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.753929 4742 scope.go:117] "RemoveContainer" containerID="1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.754201 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42"} err="failed to get container status \"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42\": rpc error: code = NotFound desc = could not find container \"1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42\": container with ID starting with 1e12cf6a6d93e7b2a6f0ee7840e81a5d06911162d59f301cc2dba1d9148eea42 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.754224 4742 scope.go:117] "RemoveContainer" containerID="f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.754488 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8"} err="failed to get container status \"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8\": rpc error: code = NotFound desc = could not find container \"f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8\": container with ID starting with f8466eb82b192c0055e874da58584ec72bb8730f60482f218c0e89588088a7a8 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.754515 4742 scope.go:117] "RemoveContainer" containerID="9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.754750 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000"} err="failed to get container status \"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000\": rpc error: code = NotFound desc = could not find 
container \"9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000\": container with ID starting with 9153569f129b502147b3681bbf8397d43b13c108bc6bc0de16fcbf17e269a000 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.754772 4742 scope.go:117] "RemoveContainer" containerID="c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.755109 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9"} err="failed to get container status \"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9\": rpc error: code = NotFound desc = could not find container \"c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9\": container with ID starting with c3365063090b1e3f7d982e2e90e75fe225b1bb52c62397189e4b8cd2477682c9 not found: ID does not exist" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.826906 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-scripts\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.826960 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rcn6\" (UniqueName: \"kubernetes.io/projected/6b5415e6-320d-4957-b6f9-045c625ed624-kube-api-access-6rcn6\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.827007 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b5415e6-320d-4957-b6f9-045c625ed624-log-httpd\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.827333 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b5415e6-320d-4957-b6f9-045c625ed624-run-httpd\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.827393 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.827589 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.827691 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-config-data\") pod \"ceilometer-0\" (UID: 
\"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.929708 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b5415e6-320d-4957-b6f9-045c625ed624-run-httpd\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.929793 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.929867 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.929950 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-config-data\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.930052 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-scripts\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.930117 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rcn6\" (UniqueName: \"kubernetes.io/projected/6b5415e6-320d-4957-b6f9-045c625ed624-kube-api-access-6rcn6\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.930193 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b5415e6-320d-4957-b6f9-045c625ed624-log-httpd\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.930365 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b5415e6-320d-4957-b6f9-045c625ed624-run-httpd\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.930869 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b5415e6-320d-4957-b6f9-045c625ed624-log-httpd\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.934565 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-scripts\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " 
pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.935692 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.937958 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-config-data\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.938948 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:21 crc kubenswrapper[4742]: I1205 06:13:21.960033 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rcn6\" (UniqueName: \"kubernetes.io/projected/6b5415e6-320d-4957-b6f9-045c625ed624-kube-api-access-6rcn6\") pod \"ceilometer-0\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " pod="openstack/ceilometer-0" Dec 05 06:13:22 crc kubenswrapper[4742]: I1205 06:13:22.017679 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:13:22 crc kubenswrapper[4742]: I1205 06:13:22.396128 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5144636d-7c40-4885-88c8-9cd38ffb0aec" path="/var/lib/kubelet/pods/5144636d-7c40-4885-88c8-9cd38ffb0aec/volumes" Dec 05 06:13:22 crc kubenswrapper[4742]: I1205 06:13:22.563127 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:13:22 crc kubenswrapper[4742]: I1205 06:13:22.632463 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b5415e6-320d-4957-b6f9-045c625ed624","Type":"ContainerStarted","Data":"a3f28f152564af1817bc17e5d7b6833ced2ffe5149e9b070ae3cb7bc6f569a01"} Dec 05 06:13:23 crc kubenswrapper[4742]: I1205 06:13:23.429110 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 06:13:23 crc kubenswrapper[4742]: I1205 06:13:23.431117 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 06:13:23 crc kubenswrapper[4742]: I1205 06:13:23.644288 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b5415e6-320d-4957-b6f9-045c625ed624","Type":"ContainerStarted","Data":"523fcbc93993d93ce5d60e372636854a73cff181f5007baa19e27e87705ed6eb"} Dec 05 06:13:24 crc kubenswrapper[4742]: I1205 06:13:24.667237 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b5415e6-320d-4957-b6f9-045c625ed624","Type":"ContainerStarted","Data":"989ea36fc77480278a9c0c731bc9744ebc9600bda470eec6054fc14e2e0af294"} Dec 05 06:13:25 crc kubenswrapper[4742]: I1205 06:13:25.679511 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"6b5415e6-320d-4957-b6f9-045c625ed624","Type":"ContainerStarted","Data":"0fee4bfffd1c6d9fb847a64ac8cce05cf829a5e628acf6b91b83d94c99ceebd7"} Dec 05 06:13:27 crc kubenswrapper[4742]: I1205 06:13:27.708951 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b5415e6-320d-4957-b6f9-045c625ed624","Type":"ContainerStarted","Data":"a87001c923191d4641603442e8ce551a7bd18b6c35ae8575a0dc29a6bb9c9878"} Dec 05 06:13:27 crc kubenswrapper[4742]: I1205 06:13:27.709648 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 06:13:27 crc kubenswrapper[4742]: I1205 06:13:27.738692 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.748657911 podStartE2EDuration="6.738664444s" podCreationTimestamp="2025-12-05 06:13:21 +0000 UTC" firstStartedPulling="2025-12-05 06:13:22.574948298 +0000 UTC m=+1278.487083400" lastFinishedPulling="2025-12-05 06:13:26.564954871 +0000 UTC m=+1282.477089933" observedRunningTime="2025-12-05 06:13:27.734877033 +0000 UTC m=+1283.647012105" watchObservedRunningTime="2025-12-05 06:13:27.738664444 +0000 UTC m=+1283.650799536" Dec 05 06:13:29 crc kubenswrapper[4742]: I1205 06:13:29.729453 4742 generic.go:334] "Generic (PLEG): container finished" podID="a53c417d-f914-40f7-a7dd-47cafb2b6718" containerID="e6ae9e5aa6d032b3ba8965356ae8b4ff4257705bc6f7ae6526108bfef473a057" exitCode=0 Dec 05 06:13:29 crc kubenswrapper[4742]: I1205 06:13:29.729546 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-t2p4w" event={"ID":"a53c417d-f914-40f7-a7dd-47cafb2b6718","Type":"ContainerDied","Data":"e6ae9e5aa6d032b3ba8965356ae8b4ff4257705bc6f7ae6526108bfef473a057"} Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.212506 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.375505 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4f2rn\" (UniqueName: \"kubernetes.io/projected/a53c417d-f914-40f7-a7dd-47cafb2b6718-kube-api-access-4f2rn\") pod \"a53c417d-f914-40f7-a7dd-47cafb2b6718\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.376630 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-scripts\") pod \"a53c417d-f914-40f7-a7dd-47cafb2b6718\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.376670 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-combined-ca-bundle\") pod \"a53c417d-f914-40f7-a7dd-47cafb2b6718\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.376750 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-config-data\") pod \"a53c417d-f914-40f7-a7dd-47cafb2b6718\" (UID: \"a53c417d-f914-40f7-a7dd-47cafb2b6718\") " Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.382965 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a53c417d-f914-40f7-a7dd-47cafb2b6718-kube-api-access-4f2rn" (OuterVolumeSpecName: "kube-api-access-4f2rn") pod "a53c417d-f914-40f7-a7dd-47cafb2b6718" (UID: "a53c417d-f914-40f7-a7dd-47cafb2b6718"). InnerVolumeSpecName "kube-api-access-4f2rn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.389269 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-scripts" (OuterVolumeSpecName: "scripts") pod "a53c417d-f914-40f7-a7dd-47cafb2b6718" (UID: "a53c417d-f914-40f7-a7dd-47cafb2b6718"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.405913 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-config-data" (OuterVolumeSpecName: "config-data") pod "a53c417d-f914-40f7-a7dd-47cafb2b6718" (UID: "a53c417d-f914-40f7-a7dd-47cafb2b6718"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.408253 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a53c417d-f914-40f7-a7dd-47cafb2b6718" (UID: "a53c417d-f914-40f7-a7dd-47cafb2b6718"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.479428 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4f2rn\" (UniqueName: \"kubernetes.io/projected/a53c417d-f914-40f7-a7dd-47cafb2b6718-kube-api-access-4f2rn\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.479470 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.479484 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.479495 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a53c417d-f914-40f7-a7dd-47cafb2b6718-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.764799 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-t2p4w" event={"ID":"a53c417d-f914-40f7-a7dd-47cafb2b6718","Type":"ContainerDied","Data":"cc5a37d2cc27e77b417c0529170d4d005e83c9b90560a3c8a8c575bee237f850"} Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.764847 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc5a37d2cc27e77b417c0529170d4d005e83c9b90560a3c8a8c575bee237f850" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.764920 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-t2p4w" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.887138 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 06:13:31 crc kubenswrapper[4742]: E1205 06:13:31.887887 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a53c417d-f914-40f7-a7dd-47cafb2b6718" containerName="nova-cell0-conductor-db-sync" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.887913 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a53c417d-f914-40f7-a7dd-47cafb2b6718" containerName="nova-cell0-conductor-db-sync" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.888303 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a53c417d-f914-40f7-a7dd-47cafb2b6718" containerName="nova-cell0-conductor-db-sync" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.889445 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.896232 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-6q6t9" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.896512 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.899880 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.903346 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/338b9928-12cd-4db4-806e-4f42612c5ab6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"338b9928-12cd-4db4-806e-4f42612c5ab6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.903423 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/338b9928-12cd-4db4-806e-4f42612c5ab6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"338b9928-12cd-4db4-806e-4f42612c5ab6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 06:13:31 crc kubenswrapper[4742]: I1205 06:13:31.903465 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf5d9\" (UniqueName: \"kubernetes.io/projected/338b9928-12cd-4db4-806e-4f42612c5ab6-kube-api-access-lf5d9\") pod \"nova-cell0-conductor-0\" (UID: \"338b9928-12cd-4db4-806e-4f42612c5ab6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 06:13:32 crc kubenswrapper[4742]: I1205 06:13:32.005156 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/338b9928-12cd-4db4-806e-4f42612c5ab6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"338b9928-12cd-4db4-806e-4f42612c5ab6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 06:13:32 crc kubenswrapper[4742]: I1205 06:13:32.005284 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/338b9928-12cd-4db4-806e-4f42612c5ab6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"338b9928-12cd-4db4-806e-4f42612c5ab6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 06:13:32 crc kubenswrapper[4742]: I1205 06:13:32.005350 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf5d9\" (UniqueName: \"kubernetes.io/projected/338b9928-12cd-4db4-806e-4f42612c5ab6-kube-api-access-lf5d9\") pod \"nova-cell0-conductor-0\" (UID: \"338b9928-12cd-4db4-806e-4f42612c5ab6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 06:13:32 crc kubenswrapper[4742]: I1205 06:13:32.011158 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/338b9928-12cd-4db4-806e-4f42612c5ab6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"338b9928-12cd-4db4-806e-4f42612c5ab6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 06:13:32 crc kubenswrapper[4742]: I1205 06:13:32.011517 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/338b9928-12cd-4db4-806e-4f42612c5ab6-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"338b9928-12cd-4db4-806e-4f42612c5ab6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 06:13:32 crc kubenswrapper[4742]: I1205 06:13:32.022782 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf5d9\" (UniqueName: \"kubernetes.io/projected/338b9928-12cd-4db4-806e-4f42612c5ab6-kube-api-access-lf5d9\") pod \"nova-cell0-conductor-0\" (UID: \"338b9928-12cd-4db4-806e-4f42612c5ab6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 06:13:32 crc kubenswrapper[4742]: I1205 06:13:32.219050 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 06:13:32 crc kubenswrapper[4742]: I1205 06:13:32.740502 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 06:13:32 crc kubenswrapper[4742]: I1205 06:13:32.774225 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"338b9928-12cd-4db4-806e-4f42612c5ab6","Type":"ContainerStarted","Data":"0631c7c2c5eef14ea1a5a5a7e1bfefb3a3e91d5cfd46a553ce1ba9013d0194ae"} Dec 05 06:13:33 crc kubenswrapper[4742]: I1205 06:13:33.792156 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"338b9928-12cd-4db4-806e-4f42612c5ab6","Type":"ContainerStarted","Data":"85c2b0d2bbfb8b6e3c396234c6a2e7332b515e33d6f3309f0bbf9466f03f62a0"} Dec 05 06:13:33 crc kubenswrapper[4742]: I1205 06:13:33.793941 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 06:13:37 crc kubenswrapper[4742]: I1205 06:13:37.268745 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 06:13:37 crc kubenswrapper[4742]: I1205 06:13:37.304909 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=6.304881827 podStartE2EDuration="6.304881827s" podCreationTimestamp="2025-12-05 06:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:13:33.826616089 +0000 UTC m=+1289.738751191" watchObservedRunningTime="2025-12-05 06:13:37.304881827 +0000 UTC m=+1293.217016899" Dec 05 06:13:37 crc kubenswrapper[4742]: I1205 06:13:37.917983 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-4qhkv"] Dec 05 06:13:37 crc kubenswrapper[4742]: I1205 06:13:37.919428 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:37 crc kubenswrapper[4742]: I1205 06:13:37.921624 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 05 06:13:37 crc kubenswrapper[4742]: I1205 06:13:37.922614 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 05 06:13:37 crc kubenswrapper[4742]: I1205 06:13:37.951493 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvmcq\" (UniqueName: \"kubernetes.io/projected/29e2d29f-d9a8-4c42-b79e-4b287ec09187-kube-api-access-cvmcq\") pod \"nova-cell0-cell-mapping-4qhkv\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:37 crc kubenswrapper[4742]: I1205 06:13:37.951686 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-4qhkv\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:37 crc kubenswrapper[4742]: I1205 06:13:37.951814 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-scripts\") pod \"nova-cell0-cell-mapping-4qhkv\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:37 crc kubenswrapper[4742]: I1205 06:13:37.951934 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-4qhkv"] Dec 05 06:13:37 crc kubenswrapper[4742]: I1205 06:13:37.951987 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-config-data\") pod \"nova-cell0-cell-mapping-4qhkv\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.054212 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-4qhkv\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.054300 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-scripts\") pod \"nova-cell0-cell-mapping-4qhkv\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.054385 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-config-data\") pod \"nova-cell0-cell-mapping-4qhkv\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.054546 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvmcq\" (UniqueName: 
\"kubernetes.io/projected/29e2d29f-d9a8-4c42-b79e-4b287ec09187-kube-api-access-cvmcq\") pod \"nova-cell0-cell-mapping-4qhkv\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.064968 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-scripts\") pod \"nova-cell0-cell-mapping-4qhkv\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.065003 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-config-data\") pod \"nova-cell0-cell-mapping-4qhkv\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.085078 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-4qhkv\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.101669 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvmcq\" (UniqueName: \"kubernetes.io/projected/29e2d29f-d9a8-4c42-b79e-4b287ec09187-kube-api-access-cvmcq\") pod \"nova-cell0-cell-mapping-4qhkv\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.147539 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.149643 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.152562 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.176377 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.249883 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.257283 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f888767f-2f9d-4514-805d-03de7aa50110-config-data\") pod \"nova-api-0\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " pod="openstack/nova-api-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.257361 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xbq7\" (UniqueName: \"kubernetes.io/projected/f888767f-2f9d-4514-805d-03de7aa50110-kube-api-access-6xbq7\") pod \"nova-api-0\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " pod="openstack/nova-api-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.257409 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f888767f-2f9d-4514-805d-03de7aa50110-logs\") pod \"nova-api-0\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " pod="openstack/nova-api-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.257558 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f888767f-2f9d-4514-805d-03de7aa50110-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " pod="openstack/nova-api-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.272109 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.274562 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.278469 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.369839 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-config-data\") pod \"nova-scheduler-0\" (UID: \"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.369895 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.369934 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f888767f-2f9d-4514-805d-03de7aa50110-config-data\") pod \"nova-api-0\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " pod="openstack/nova-api-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.370940 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs97d\" (UniqueName: \"kubernetes.io/projected/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-kube-api-access-hs97d\") pod \"nova-scheduler-0\" (UID: \"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.371009 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xbq7\" (UniqueName: \"kubernetes.io/projected/f888767f-2f9d-4514-805d-03de7aa50110-kube-api-access-6xbq7\") pod \"nova-api-0\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " pod="openstack/nova-api-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.371143 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f888767f-2f9d-4514-805d-03de7aa50110-logs\") pod \"nova-api-0\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " pod="openstack/nova-api-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.371387 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f888767f-2f9d-4514-805d-03de7aa50110-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " pod="openstack/nova-api-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.376113 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f888767f-2f9d-4514-805d-03de7aa50110-config-data\") pod \"nova-api-0\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " pod="openstack/nova-api-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.376487 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f888767f-2f9d-4514-805d-03de7aa50110-logs\") pod \"nova-api-0\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " pod="openstack/nova-api-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.400982 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f888767f-2f9d-4514-805d-03de7aa50110-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " pod="openstack/nova-api-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.425693 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xbq7\" (UniqueName: \"kubernetes.io/projected/f888767f-2f9d-4514-805d-03de7aa50110-kube-api-access-6xbq7\") pod \"nova-api-0\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " pod="openstack/nova-api-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.453978 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.455933 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.460443 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.471485 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.472378 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.472905 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-config-data\") pod \"nova-scheduler-0\" (UID: \"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.472930 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.472964 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs97d\" (UniqueName: \"kubernetes.io/projected/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-kube-api-access-hs97d\") pod \"nova-scheduler-0\" (UID: \"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.478472 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-config-data\") pod \"nova-scheduler-0\" (UID: \"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.497290 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.500124 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs97d\" (UniqueName: \"kubernetes.io/projected/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-kube-api-access-hs97d\") pod \"nova-scheduler-0\" (UID: 
\"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.537742 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.557076 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.558870 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.561456 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.568703 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.574166 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfd8q\" (UniqueName: \"kubernetes.io/projected/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-kube-api-access-pfd8q\") pod \"nova-cell1-novncproxy-0\" (UID: \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.574214 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.574282 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.587354 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-fp96k"] Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.590369 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.593110 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-fp96k"] Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.616424 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.679044 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14acb226-dcfb-4756-a0db-76a1a9419c43-config-data\") pod \"nova-metadata-0\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " pod="openstack/nova-metadata-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.679114 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.679139 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zfn4\" (UniqueName: \"kubernetes.io/projected/14acb226-dcfb-4756-a0db-76a1a9419c43-kube-api-access-9zfn4\") pod \"nova-metadata-0\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " pod="openstack/nova-metadata-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.679158 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14acb226-dcfb-4756-a0db-76a1a9419c43-logs\") pod \"nova-metadata-0\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " pod="openstack/nova-metadata-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.679240 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14acb226-dcfb-4756-a0db-76a1a9419c43-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " pod="openstack/nova-metadata-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.679270 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfd8q\" (UniqueName: \"kubernetes.io/projected/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-kube-api-access-pfd8q\") pod \"nova-cell1-novncproxy-0\" (UID: \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.679292 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.692876 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.692885 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.709652 4742 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfd8q\" (UniqueName: \"kubernetes.io/projected/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-kube-api-access-pfd8q\") pod \"nova-cell1-novncproxy-0\" (UID: \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.783123 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-config\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.783470 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-dns-svc\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.783505 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14acb226-dcfb-4756-a0db-76a1a9419c43-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " pod="openstack/nova-metadata-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.783533 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.783587 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.783620 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6z46\" (UniqueName: \"kubernetes.io/projected/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-kube-api-access-c6z46\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.783637 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.783661 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14acb226-dcfb-4756-a0db-76a1a9419c43-config-data\") pod \"nova-metadata-0\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " pod="openstack/nova-metadata-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.783729 4742 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zfn4\" (UniqueName: \"kubernetes.io/projected/14acb226-dcfb-4756-a0db-76a1a9419c43-kube-api-access-9zfn4\") pod \"nova-metadata-0\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " pod="openstack/nova-metadata-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.783752 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14acb226-dcfb-4756-a0db-76a1a9419c43-logs\") pod \"nova-metadata-0\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " pod="openstack/nova-metadata-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.784257 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14acb226-dcfb-4756-a0db-76a1a9419c43-logs\") pod \"nova-metadata-0\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " pod="openstack/nova-metadata-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.791301 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14acb226-dcfb-4756-a0db-76a1a9419c43-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " pod="openstack/nova-metadata-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.794376 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14acb226-dcfb-4756-a0db-76a1a9419c43-config-data\") pod \"nova-metadata-0\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " pod="openstack/nova-metadata-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.805448 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zfn4\" (UniqueName: \"kubernetes.io/projected/14acb226-dcfb-4756-a0db-76a1a9419c43-kube-api-access-9zfn4\") pod \"nova-metadata-0\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " pod="openstack/nova-metadata-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.810334 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.879185 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.886503 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-config\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.886624 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-dns-svc\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.886688 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.886772 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.886831 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6z46\" (UniqueName: \"kubernetes.io/projected/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-kube-api-access-c6z46\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.888102 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-dns-svc\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.888762 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-config\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.888814 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.889577 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.892357 4742 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.893016 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.911110 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6z46\" (UniqueName: \"kubernetes.io/projected/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-kube-api-access-c6z46\") pod \"dnsmasq-dns-bccf8f775-fp96k\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.923724 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.954023 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-q5mkg"] Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.955627 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.958258 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.958435 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.966967 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-q5mkg"] Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.977212 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-4qhkv"] Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.993345 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-config-data\") pod \"nova-cell1-conductor-db-sync-q5mkg\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.993421 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k95mq\" (UniqueName: \"kubernetes.io/projected/a998c383-44cc-4b30-a27b-57860fdd3353-kube-api-access-k95mq\") pod \"nova-cell1-conductor-db-sync-q5mkg\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.993450 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-scripts\") pod \"nova-cell1-conductor-db-sync-q5mkg\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " 
pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:38 crc kubenswrapper[4742]: I1205 06:13:38.993500 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-q5mkg\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.062482 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.095486 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k95mq\" (UniqueName: \"kubernetes.io/projected/a998c383-44cc-4b30-a27b-57860fdd3353-kube-api-access-k95mq\") pod \"nova-cell1-conductor-db-sync-q5mkg\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.095942 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-scripts\") pod \"nova-cell1-conductor-db-sync-q5mkg\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.096028 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-q5mkg\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.096125 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-config-data\") pod \"nova-cell1-conductor-db-sync-q5mkg\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.102502 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-q5mkg\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.103832 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-config-data\") pod \"nova-cell1-conductor-db-sync-q5mkg\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.113839 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-scripts\") pod \"nova-cell1-conductor-db-sync-q5mkg\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.121572 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k95mq\" (UniqueName: 
\"kubernetes.io/projected/a998c383-44cc-4b30-a27b-57860fdd3353-kube-api-access-k95mq\") pod \"nova-cell1-conductor-db-sync-q5mkg\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.185927 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.272713 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.406782 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.436564 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 06:13:39 crc kubenswrapper[4742]: W1205 06:13:39.442229 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ae101ec_d4f6_4df7_89a2_eaf0d8ca1c28.slice/crio-5ae824c6e688ffa0a3fbd738de1c1ae3475f7e82c624f9f061701173a4472c74 WatchSource:0}: Error finding container 5ae824c6e688ffa0a3fbd738de1c1ae3475f7e82c624f9f061701173a4472c74: Status 404 returned error can't find the container with id 5ae824c6e688ffa0a3fbd738de1c1ae3475f7e82c624f9f061701173a4472c74 Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.561747 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-fp96k"] Dec 05 06:13:39 crc kubenswrapper[4742]: W1205 06:13:39.565201 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda09bb1ea_b66c_4ae0_9a77_456a95a914b4.slice/crio-04640f16acfaf6c3224629a17de588a5c6c8cd341313ea5cc805d09233db7080 WatchSource:0}: Error finding container 04640f16acfaf6c3224629a17de588a5c6c8cd341313ea5cc805d09233db7080: Status 404 returned error can't find the container with id 04640f16acfaf6c3224629a17de588a5c6c8cd341313ea5cc805d09233db7080 Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.822843 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-q5mkg"] Dec 05 06:13:39 crc kubenswrapper[4742]: W1205 06:13:39.824535 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda998c383_44cc_4b30_a27b_57860fdd3353.slice/crio-c76a85dbaf279144e289f5493afb04b878a7a9b77cf83a0fcf378dd8ef5b1634 WatchSource:0}: Error finding container c76a85dbaf279144e289f5493afb04b878a7a9b77cf83a0fcf378dd8ef5b1634: Status 404 returned error can't find the container with id c76a85dbaf279144e289f5493afb04b878a7a9b77cf83a0fcf378dd8ef5b1634 Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.860068 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020","Type":"ContainerStarted","Data":"81e45b9f087bd41ae5b55da27e70844c77d6b3be857311fddbf51333cdfc8710"} Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.862249 4742 generic.go:334] "Generic (PLEG): container finished" podID="a09bb1ea-b66c-4ae0-9a77-456a95a914b4" containerID="4c5e340425dc26b212cea3450f09d341d0737833ec2979f0ec097e0522fa766b" exitCode=0 Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.862287 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-bccf8f775-fp96k" event={"ID":"a09bb1ea-b66c-4ae0-9a77-456a95a914b4","Type":"ContainerDied","Data":"4c5e340425dc26b212cea3450f09d341d0737833ec2979f0ec097e0522fa766b"} Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.862335 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-fp96k" event={"ID":"a09bb1ea-b66c-4ae0-9a77-456a95a914b4","Type":"ContainerStarted","Data":"04640f16acfaf6c3224629a17de588a5c6c8cd341313ea5cc805d09233db7080"} Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.864029 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-4qhkv" event={"ID":"29e2d29f-d9a8-4c42-b79e-4b287ec09187","Type":"ContainerStarted","Data":"96d5fa9cee2959bc805b039bf843ed8935b507be63f1df35905b848a55c83d14"} Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.864074 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-4qhkv" event={"ID":"29e2d29f-d9a8-4c42-b79e-4b287ec09187","Type":"ContainerStarted","Data":"cf2be5438235763e916fd3873ae435f43cdababe241f1d311751676e3e9cd2e9"} Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.867428 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-q5mkg" event={"ID":"a998c383-44cc-4b30-a27b-57860fdd3353","Type":"ContainerStarted","Data":"c76a85dbaf279144e289f5493afb04b878a7a9b77cf83a0fcf378dd8ef5b1634"} Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.869224 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"14acb226-dcfb-4756-a0db-76a1a9419c43","Type":"ContainerStarted","Data":"21b0ad647ce6036c5bd57bf6a0c8be1830b363b72bfc0758bd33885ef15ff866"} Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.872476 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28","Type":"ContainerStarted","Data":"5ae824c6e688ffa0a3fbd738de1c1ae3475f7e82c624f9f061701173a4472c74"} Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.882555 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f888767f-2f9d-4514-805d-03de7aa50110","Type":"ContainerStarted","Data":"287ced1c6a46812dbfdb3133a4bf8782fba762d94e260569aa1e2cf6c84bd527"} Dec 05 06:13:39 crc kubenswrapper[4742]: I1205 06:13:39.901537 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-4qhkv" podStartSLOduration=2.9015198890000002 podStartE2EDuration="2.901519889s" podCreationTimestamp="2025-12-05 06:13:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:13:39.897627195 +0000 UTC m=+1295.809762267" watchObservedRunningTime="2025-12-05 06:13:39.901519889 +0000 UTC m=+1295.813654951" Dec 05 06:13:40 crc kubenswrapper[4742]: I1205 06:13:40.900122 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-fp96k" event={"ID":"a09bb1ea-b66c-4ae0-9a77-456a95a914b4","Type":"ContainerStarted","Data":"c11115c3b48323925558468a080d253e78c7eaf8bd95984490a799595a23bae4"} Dec 05 06:13:40 crc kubenswrapper[4742]: I1205 06:13:40.901171 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:40 crc kubenswrapper[4742]: I1205 06:13:40.904072 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-conductor-db-sync-q5mkg" event={"ID":"a998c383-44cc-4b30-a27b-57860fdd3353","Type":"ContainerStarted","Data":"c929e4d68a7edf82190727f895b54f4ccde921cfe66539dcbe06759346886432"} Dec 05 06:13:40 crc kubenswrapper[4742]: I1205 06:13:40.921733 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bccf8f775-fp96k" podStartSLOduration=2.921714104 podStartE2EDuration="2.921714104s" podCreationTimestamp="2025-12-05 06:13:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:13:40.917394559 +0000 UTC m=+1296.829529621" watchObservedRunningTime="2025-12-05 06:13:40.921714104 +0000 UTC m=+1296.833849166" Dec 05 06:13:40 crc kubenswrapper[4742]: I1205 06:13:40.939900 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-q5mkg" podStartSLOduration=2.939882598 podStartE2EDuration="2.939882598s" podCreationTimestamp="2025-12-05 06:13:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:13:40.93208569 +0000 UTC m=+1296.844220752" watchObservedRunningTime="2025-12-05 06:13:40.939882598 +0000 UTC m=+1296.852017660" Dec 05 06:13:41 crc kubenswrapper[4742]: I1205 06:13:41.609554 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:13:41 crc kubenswrapper[4742]: I1205 06:13:41.617646 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 06:13:42 crc kubenswrapper[4742]: I1205 06:13:42.920859 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"14acb226-dcfb-4756-a0db-76a1a9419c43","Type":"ContainerStarted","Data":"7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862"} Dec 05 06:13:42 crc kubenswrapper[4742]: I1205 06:13:42.922137 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28","Type":"ContainerStarted","Data":"5e9198ed7331873e639b7e979d47e18d2c852caadc64a825cf7b82063fb55bbb"} Dec 05 06:13:42 crc kubenswrapper[4742]: I1205 06:13:42.922307 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://5e9198ed7331873e639b7e979d47e18d2c852caadc64a825cf7b82063fb55bbb" gracePeriod=30 Dec 05 06:13:42 crc kubenswrapper[4742]: I1205 06:13:42.933143 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f888767f-2f9d-4514-805d-03de7aa50110","Type":"ContainerStarted","Data":"4064bfec7b7a22ff0dc6d89181766eff0375f0fa5f51c04e27c079da039a03be"} Dec 05 06:13:42 crc kubenswrapper[4742]: I1205 06:13:42.942494 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020","Type":"ContainerStarted","Data":"5e1631c3bb7fc93b7035df6f05e8a76a65e7e94a83bafa1d1089a76287823bf8"} Dec 05 06:13:42 crc kubenswrapper[4742]: I1205 06:13:42.951082 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.089547027 podStartE2EDuration="4.9510449s" podCreationTimestamp="2025-12-05 06:13:38 +0000 UTC" 
firstStartedPulling="2025-12-05 06:13:39.447029918 +0000 UTC m=+1295.359164980" lastFinishedPulling="2025-12-05 06:13:42.308527791 +0000 UTC m=+1298.220662853" observedRunningTime="2025-12-05 06:13:42.943694164 +0000 UTC m=+1298.855829226" watchObservedRunningTime="2025-12-05 06:13:42.9510449 +0000 UTC m=+1298.863179962" Dec 05 06:13:42 crc kubenswrapper[4742]: I1205 06:13:42.970304 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.856580212 podStartE2EDuration="4.970282882s" podCreationTimestamp="2025-12-05 06:13:38 +0000 UTC" firstStartedPulling="2025-12-05 06:13:39.192175541 +0000 UTC m=+1295.104310603" lastFinishedPulling="2025-12-05 06:13:42.305878221 +0000 UTC m=+1298.218013273" observedRunningTime="2025-12-05 06:13:42.962903206 +0000 UTC m=+1298.875038288" watchObservedRunningTime="2025-12-05 06:13:42.970282882 +0000 UTC m=+1298.882417954" Dec 05 06:13:43 crc kubenswrapper[4742]: I1205 06:13:43.617107 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 06:13:43 crc kubenswrapper[4742]: I1205 06:13:43.812285 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:13:43 crc kubenswrapper[4742]: I1205 06:13:43.956358 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"14acb226-dcfb-4756-a0db-76a1a9419c43","Type":"ContainerStarted","Data":"2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb"} Dec 05 06:13:43 crc kubenswrapper[4742]: I1205 06:13:43.956484 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="14acb226-dcfb-4756-a0db-76a1a9419c43" containerName="nova-metadata-log" containerID="cri-o://7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862" gracePeriod=30 Dec 05 06:13:43 crc kubenswrapper[4742]: I1205 06:13:43.956529 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="14acb226-dcfb-4756-a0db-76a1a9419c43" containerName="nova-metadata-metadata" containerID="cri-o://2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb" gracePeriod=30 Dec 05 06:13:43 crc kubenswrapper[4742]: I1205 06:13:43.963784 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f888767f-2f9d-4514-805d-03de7aa50110","Type":"ContainerStarted","Data":"038b9da41e74aa1b29139a01cebf7e8813d1faac7980048b46e40c9aaa797eb7"} Dec 05 06:13:43 crc kubenswrapper[4742]: I1205 06:13:43.990092 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.079924259 podStartE2EDuration="5.990069948s" podCreationTimestamp="2025-12-05 06:13:38 +0000 UTC" firstStartedPulling="2025-12-05 06:13:39.428523205 +0000 UTC m=+1295.340658267" lastFinishedPulling="2025-12-05 06:13:42.338668894 +0000 UTC m=+1298.250803956" observedRunningTime="2025-12-05 06:13:43.981250296 +0000 UTC m=+1299.893385398" watchObservedRunningTime="2025-12-05 06:13:43.990069948 +0000 UTC m=+1299.902205020" Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.001949 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.773535437 podStartE2EDuration="6.001933331s" podCreationTimestamp="2025-12-05 06:13:38 +0000 UTC" firstStartedPulling="2025-12-05 06:13:39.077034715 +0000 UTC 
m=+1294.989169767" lastFinishedPulling="2025-12-05 06:13:42.305432559 +0000 UTC m=+1298.217567661" observedRunningTime="2025-12-05 06:13:43.998585193 +0000 UTC m=+1299.910720265" watchObservedRunningTime="2025-12-05 06:13:44.001933331 +0000 UTC m=+1299.914068393" Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.584888 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.736494 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14acb226-dcfb-4756-a0db-76a1a9419c43-logs\") pod \"14acb226-dcfb-4756-a0db-76a1a9419c43\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.736890 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14acb226-dcfb-4756-a0db-76a1a9419c43-logs" (OuterVolumeSpecName: "logs") pod "14acb226-dcfb-4756-a0db-76a1a9419c43" (UID: "14acb226-dcfb-4756-a0db-76a1a9419c43"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.736980 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zfn4\" (UniqueName: \"kubernetes.io/projected/14acb226-dcfb-4756-a0db-76a1a9419c43-kube-api-access-9zfn4\") pod \"14acb226-dcfb-4756-a0db-76a1a9419c43\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.737142 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14acb226-dcfb-4756-a0db-76a1a9419c43-config-data\") pod \"14acb226-dcfb-4756-a0db-76a1a9419c43\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.737192 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14acb226-dcfb-4756-a0db-76a1a9419c43-combined-ca-bundle\") pod \"14acb226-dcfb-4756-a0db-76a1a9419c43\" (UID: \"14acb226-dcfb-4756-a0db-76a1a9419c43\") " Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.737555 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14acb226-dcfb-4756-a0db-76a1a9419c43-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.747244 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14acb226-dcfb-4756-a0db-76a1a9419c43-kube-api-access-9zfn4" (OuterVolumeSpecName: "kube-api-access-9zfn4") pod "14acb226-dcfb-4756-a0db-76a1a9419c43" (UID: "14acb226-dcfb-4756-a0db-76a1a9419c43"). InnerVolumeSpecName "kube-api-access-9zfn4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.776137 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14acb226-dcfb-4756-a0db-76a1a9419c43-config-data" (OuterVolumeSpecName: "config-data") pod "14acb226-dcfb-4756-a0db-76a1a9419c43" (UID: "14acb226-dcfb-4756-a0db-76a1a9419c43"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.783238 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14acb226-dcfb-4756-a0db-76a1a9419c43-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14acb226-dcfb-4756-a0db-76a1a9419c43" (UID: "14acb226-dcfb-4756-a0db-76a1a9419c43"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.839141 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14acb226-dcfb-4756-a0db-76a1a9419c43-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.839170 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14acb226-dcfb-4756-a0db-76a1a9419c43-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.839184 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zfn4\" (UniqueName: \"kubernetes.io/projected/14acb226-dcfb-4756-a0db-76a1a9419c43-kube-api-access-9zfn4\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.976760 4742 generic.go:334] "Generic (PLEG): container finished" podID="14acb226-dcfb-4756-a0db-76a1a9419c43" containerID="2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb" exitCode=0 Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.976803 4742 generic.go:334] "Generic (PLEG): container finished" podID="14acb226-dcfb-4756-a0db-76a1a9419c43" containerID="7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862" exitCode=143 Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.976801 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"14acb226-dcfb-4756-a0db-76a1a9419c43","Type":"ContainerDied","Data":"2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb"} Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.976840 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.976875 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"14acb226-dcfb-4756-a0db-76a1a9419c43","Type":"ContainerDied","Data":"7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862"} Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.976887 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"14acb226-dcfb-4756-a0db-76a1a9419c43","Type":"ContainerDied","Data":"21b0ad647ce6036c5bd57bf6a0c8be1830b363b72bfc0758bd33885ef15ff866"} Dec 05 06:13:44 crc kubenswrapper[4742]: I1205 06:13:44.976902 4742 scope.go:117] "RemoveContainer" containerID="2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.011348 4742 scope.go:117] "RemoveContainer" containerID="7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.027404 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.032125 4742 scope.go:117] "RemoveContainer" containerID="2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb" Dec 05 06:13:45 crc kubenswrapper[4742]: E1205 06:13:45.032648 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb\": container with ID starting with 2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb not found: ID does not exist" containerID="2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.032681 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb"} err="failed to get container status \"2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb\": rpc error: code = NotFound desc = could not find container \"2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb\": container with ID starting with 2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb not found: ID does not exist" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.032701 4742 scope.go:117] "RemoveContainer" containerID="7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862" Dec 05 06:13:45 crc kubenswrapper[4742]: E1205 06:13:45.033349 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862\": container with ID starting with 7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862 not found: ID does not exist" containerID="7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.033374 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862"} err="failed to get container status \"7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862\": rpc error: code = NotFound desc = could not find container \"7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862\": container with ID starting with 
7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862 not found: ID does not exist" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.033389 4742 scope.go:117] "RemoveContainer" containerID="2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.033674 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb"} err="failed to get container status \"2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb\": rpc error: code = NotFound desc = could not find container \"2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb\": container with ID starting with 2d8f29f2af69615f8f8e2c3e080e5fa3ced6fd16537b1131910d4513b80965eb not found: ID does not exist" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.033725 4742 scope.go:117] "RemoveContainer" containerID="7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.034359 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862"} err="failed to get container status \"7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862\": rpc error: code = NotFound desc = could not find container \"7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862\": container with ID starting with 7606d0f90dc31b451befce2afb9d1a0631a60c2142208986c20ec5f28426d862 not found: ID does not exist" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.034456 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.055510 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:13:45 crc kubenswrapper[4742]: E1205 06:13:45.055889 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14acb226-dcfb-4756-a0db-76a1a9419c43" containerName="nova-metadata-metadata" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.055908 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="14acb226-dcfb-4756-a0db-76a1a9419c43" containerName="nova-metadata-metadata" Dec 05 06:13:45 crc kubenswrapper[4742]: E1205 06:13:45.055945 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14acb226-dcfb-4756-a0db-76a1a9419c43" containerName="nova-metadata-log" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.055954 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="14acb226-dcfb-4756-a0db-76a1a9419c43" containerName="nova-metadata-log" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.056203 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="14acb226-dcfb-4756-a0db-76a1a9419c43" containerName="nova-metadata-log" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.056233 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="14acb226-dcfb-4756-a0db-76a1a9419c43" containerName="nova-metadata-metadata" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.057176 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.062272 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.062614 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.072354 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.145433 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-config-data\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.145469 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.145497 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.145520 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f8a57fc-bb9f-42ac-b31f-d557170ed865-logs\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.145566 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4s4jq\" (UniqueName: \"kubernetes.io/projected/2f8a57fc-bb9f-42ac-b31f-d557170ed865-kube-api-access-4s4jq\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.246884 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-config-data\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.246923 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.246951 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " 
pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.246975 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f8a57fc-bb9f-42ac-b31f-d557170ed865-logs\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.247030 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4s4jq\" (UniqueName: \"kubernetes.io/projected/2f8a57fc-bb9f-42ac-b31f-d557170ed865-kube-api-access-4s4jq\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.251740 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f8a57fc-bb9f-42ac-b31f-d557170ed865-logs\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.253956 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-config-data\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.254393 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.260935 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.279034 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4s4jq\" (UniqueName: \"kubernetes.io/projected/2f8a57fc-bb9f-42ac-b31f-d557170ed865-kube-api-access-4s4jq\") pod \"nova-metadata-0\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.375909 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 06:13:45 crc kubenswrapper[4742]: I1205 06:13:45.903886 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:13:46 crc kubenswrapper[4742]: I1205 06:13:46.017914 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2f8a57fc-bb9f-42ac-b31f-d557170ed865","Type":"ContainerStarted","Data":"8118477a621f802d3c3ce32c6fcf46ecb5113e4c4fb6fcd45de4f6bc54756d6c"} Dec 05 06:13:46 crc kubenswrapper[4742]: I1205 06:13:46.406459 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14acb226-dcfb-4756-a0db-76a1a9419c43" path="/var/lib/kubelet/pods/14acb226-dcfb-4756-a0db-76a1a9419c43/volumes" Dec 05 06:13:47 crc kubenswrapper[4742]: I1205 06:13:47.031388 4742 generic.go:334] "Generic (PLEG): container finished" podID="29e2d29f-d9a8-4c42-b79e-4b287ec09187" containerID="96d5fa9cee2959bc805b039bf843ed8935b507be63f1df35905b848a55c83d14" exitCode=0 Dec 05 06:13:47 crc kubenswrapper[4742]: I1205 06:13:47.031455 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-4qhkv" event={"ID":"29e2d29f-d9a8-4c42-b79e-4b287ec09187","Type":"ContainerDied","Data":"96d5fa9cee2959bc805b039bf843ed8935b507be63f1df35905b848a55c83d14"} Dec 05 06:13:47 crc kubenswrapper[4742]: I1205 06:13:47.033974 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2f8a57fc-bb9f-42ac-b31f-d557170ed865","Type":"ContainerStarted","Data":"187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177"} Dec 05 06:13:47 crc kubenswrapper[4742]: I1205 06:13:47.034007 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2f8a57fc-bb9f-42ac-b31f-d557170ed865","Type":"ContainerStarted","Data":"7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04"} Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.049265 4742 generic.go:334] "Generic (PLEG): container finished" podID="a998c383-44cc-4b30-a27b-57860fdd3353" containerID="c929e4d68a7edf82190727f895b54f4ccde921cfe66539dcbe06759346886432" exitCode=0 Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.049334 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-q5mkg" event={"ID":"a998c383-44cc-4b30-a27b-57860fdd3353","Type":"ContainerDied","Data":"c929e4d68a7edf82190727f895b54f4ccde921cfe66539dcbe06759346886432"} Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.082304 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.082276776 podStartE2EDuration="3.082276776s" podCreationTimestamp="2025-12-05 06:13:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:13:47.07754835 +0000 UTC m=+1302.989683412" watchObservedRunningTime="2025-12-05 06:13:48.082276776 +0000 UTC m=+1303.994411898" Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.472844 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.473370 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.568227 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.617368 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.632286 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-scripts\") pod \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.632328 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvmcq\" (UniqueName: \"kubernetes.io/projected/29e2d29f-d9a8-4c42-b79e-4b287ec09187-kube-api-access-cvmcq\") pod \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.632371 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-combined-ca-bundle\") pod \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.632444 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-config-data\") pod \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\" (UID: \"29e2d29f-d9a8-4c42-b79e-4b287ec09187\") " Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.639878 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29e2d29f-d9a8-4c42-b79e-4b287ec09187-kube-api-access-cvmcq" (OuterVolumeSpecName: "kube-api-access-cvmcq") pod "29e2d29f-d9a8-4c42-b79e-4b287ec09187" (UID: "29e2d29f-d9a8-4c42-b79e-4b287ec09187"). InnerVolumeSpecName "kube-api-access-cvmcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.641718 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-scripts" (OuterVolumeSpecName: "scripts") pod "29e2d29f-d9a8-4c42-b79e-4b287ec09187" (UID: "29e2d29f-d9a8-4c42-b79e-4b287ec09187"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.651968 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.678329 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29e2d29f-d9a8-4c42-b79e-4b287ec09187" (UID: "29e2d29f-d9a8-4c42-b79e-4b287ec09187"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.691502 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-config-data" (OuterVolumeSpecName: "config-data") pod "29e2d29f-d9a8-4c42-b79e-4b287ec09187" (UID: "29e2d29f-d9a8-4c42-b79e-4b287ec09187"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.734856 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.734899 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.734913 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvmcq\" (UniqueName: \"kubernetes.io/projected/29e2d29f-d9a8-4c42-b79e-4b287ec09187-kube-api-access-cvmcq\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.734926 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29e2d29f-d9a8-4c42-b79e-4b287ec09187-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:48 crc kubenswrapper[4742]: I1205 06:13:48.926456 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.002973 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-46pxh"] Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.003478 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6578955fd5-46pxh" podUID="d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" containerName="dnsmasq-dns" containerID="cri-o://4af5cdeba3084a95fef3915a0cce0da54f6dbf9824935a0915a46081f62f7ba4" gracePeriod=10 Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.064250 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-4qhkv" event={"ID":"29e2d29f-d9a8-4c42-b79e-4b287ec09187","Type":"ContainerDied","Data":"cf2be5438235763e916fd3873ae435f43cdababe241f1d311751676e3e9cd2e9"} Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.064306 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf2be5438235763e916fd3873ae435f43cdababe241f1d311751676e3e9cd2e9" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.064459 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-4qhkv" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.112571 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.205590 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.221148 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.250211 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.250427 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2f8a57fc-bb9f-42ac-b31f-d557170ed865" containerName="nova-metadata-log" containerID="cri-o://7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04" gracePeriod=30 Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.250942 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="2f8a57fc-bb9f-42ac-b31f-d557170ed865" containerName="nova-metadata-metadata" containerID="cri-o://187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177" gracePeriod=30 Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.560044 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f888767f-2f9d-4514-805d-03de7aa50110" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.181:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.560139 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f888767f-2f9d-4514-805d-03de7aa50110" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.181:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.678410 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.715004 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.864646 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-dns-swift-storage-0\") pod \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.864720 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-config-data\") pod \"a998c383-44cc-4b30-a27b-57860fdd3353\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.864746 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-ovsdbserver-nb\") pod \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.864768 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-scripts\") pod \"a998c383-44cc-4b30-a27b-57860fdd3353\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.864803 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-dns-svc\") pod \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.864886 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k95mq\" (UniqueName: \"kubernetes.io/projected/a998c383-44cc-4b30-a27b-57860fdd3353-kube-api-access-k95mq\") pod \"a998c383-44cc-4b30-a27b-57860fdd3353\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.864984 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-ovsdbserver-sb\") pod \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.865034 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-combined-ca-bundle\") pod \"a998c383-44cc-4b30-a27b-57860fdd3353\" (UID: \"a998c383-44cc-4b30-a27b-57860fdd3353\") " Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.865156 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-config\") pod \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\" (UID: \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.865187 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z49bc\" (UniqueName: \"kubernetes.io/projected/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-kube-api-access-z49bc\") pod \"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\" (UID: 
\"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2\") " Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.872289 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-kube-api-access-z49bc" (OuterVolumeSpecName: "kube-api-access-z49bc") pod "d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" (UID: "d0352ff8-ed72-463c-a7b8-3e69e23a3ea2"). InnerVolumeSpecName "kube-api-access-z49bc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.879493 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a998c383-44cc-4b30-a27b-57860fdd3353-kube-api-access-k95mq" (OuterVolumeSpecName: "kube-api-access-k95mq") pod "a998c383-44cc-4b30-a27b-57860fdd3353" (UID: "a998c383-44cc-4b30-a27b-57860fdd3353"). InnerVolumeSpecName "kube-api-access-k95mq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.885342 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-scripts" (OuterVolumeSpecName: "scripts") pod "a998c383-44cc-4b30-a27b-57860fdd3353" (UID: "a998c383-44cc-4b30-a27b-57860fdd3353"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.923297 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.953314 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" (UID: "d0352ff8-ed72-463c-a7b8-3e69e23a3ea2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.967551 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" (UID: "d0352ff8-ed72-463c-a7b8-3e69e23a3ea2"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.968852 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k95mq\" (UniqueName: \"kubernetes.io/projected/a998c383-44cc-4b30-a27b-57860fdd3353-kube-api-access-k95mq\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.968878 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.968888 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z49bc\" (UniqueName: \"kubernetes.io/projected/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-kube-api-access-z49bc\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.968900 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:49 crc kubenswrapper[4742]: I1205 06:13:49.968911 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.000844 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a998c383-44cc-4b30-a27b-57860fdd3353" (UID: "a998c383-44cc-4b30-a27b-57860fdd3353"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.043622 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" (UID: "d0352ff8-ed72-463c-a7b8-3e69e23a3ea2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.055179 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-config" (OuterVolumeSpecName: "config") pod "d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" (UID: "d0352ff8-ed72-463c-a7b8-3e69e23a3ea2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.068445 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-config-data" (OuterVolumeSpecName: "config-data") pod "a998c383-44cc-4b30-a27b-57860fdd3353" (UID: "a998c383-44cc-4b30-a27b-57860fdd3353"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.071766 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-nova-metadata-tls-certs\") pod \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.071821 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-config-data\") pod \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.071966 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4s4jq\" (UniqueName: \"kubernetes.io/projected/2f8a57fc-bb9f-42ac-b31f-d557170ed865-kube-api-access-4s4jq\") pod \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.072042 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f8a57fc-bb9f-42ac-b31f-d557170ed865-logs\") pod \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.072151 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-combined-ca-bundle\") pod \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\" (UID: \"2f8a57fc-bb9f-42ac-b31f-d557170ed865\") " Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.072632 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.072664 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.072678 4742 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.072690 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a998c383-44cc-4b30-a27b-57860fdd3353-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.078519 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f8a57fc-bb9f-42ac-b31f-d557170ed865-logs" (OuterVolumeSpecName: "logs") pod "2f8a57fc-bb9f-42ac-b31f-d557170ed865" (UID: "2f8a57fc-bb9f-42ac-b31f-d557170ed865"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.092348 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f8a57fc-bb9f-42ac-b31f-d557170ed865-kube-api-access-4s4jq" (OuterVolumeSpecName: "kube-api-access-4s4jq") pod "2f8a57fc-bb9f-42ac-b31f-d557170ed865" (UID: "2f8a57fc-bb9f-42ac-b31f-d557170ed865"). InnerVolumeSpecName "kube-api-access-4s4jq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.093582 4742 generic.go:334] "Generic (PLEG): container finished" podID="d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" containerID="4af5cdeba3084a95fef3915a0cce0da54f6dbf9824935a0915a46081f62f7ba4" exitCode=0 Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.093682 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-46pxh" event={"ID":"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2","Type":"ContainerDied","Data":"4af5cdeba3084a95fef3915a0cce0da54f6dbf9824935a0915a46081f62f7ba4"} Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.093728 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-46pxh" event={"ID":"d0352ff8-ed72-463c-a7b8-3e69e23a3ea2","Type":"ContainerDied","Data":"1347c934f24ff35b4b873199618519b4ccd8b96b0fa2279e16092c71d3302806"} Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.093745 4742 scope.go:117] "RemoveContainer" containerID="4af5cdeba3084a95fef3915a0cce0da54f6dbf9824935a0915a46081f62f7ba4" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.093914 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-46pxh" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.114656 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-q5mkg" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.114725 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-q5mkg" event={"ID":"a998c383-44cc-4b30-a27b-57860fdd3353","Type":"ContainerDied","Data":"c76a85dbaf279144e289f5493afb04b878a7a9b77cf83a0fcf378dd8ef5b1634"} Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.114769 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c76a85dbaf279144e289f5493afb04b878a7a9b77cf83a0fcf378dd8ef5b1634" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.115658 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" (UID: "d0352ff8-ed72-463c-a7b8-3e69e23a3ea2"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.141211 4742 scope.go:117] "RemoveContainer" containerID="f0c13babb3b0d83117243ce9f5757a86bcd5049dbd2b26f9e68d5e00c8341438" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.142914 4742 generic.go:334] "Generic (PLEG): container finished" podID="2f8a57fc-bb9f-42ac-b31f-d557170ed865" containerID="187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177" exitCode=0 Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.142942 4742 generic.go:334] "Generic (PLEG): container finished" podID="2f8a57fc-bb9f-42ac-b31f-d557170ed865" containerID="7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04" exitCode=143 Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.143140 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f888767f-2f9d-4514-805d-03de7aa50110" containerName="nova-api-log" containerID="cri-o://4064bfec7b7a22ff0dc6d89181766eff0375f0fa5f51c04e27c079da039a03be" gracePeriod=30 Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.143226 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f888767f-2f9d-4514-805d-03de7aa50110" containerName="nova-api-api" containerID="cri-o://038b9da41e74aa1b29139a01cebf7e8813d1faac7980048b46e40c9aaa797eb7" gracePeriod=30 Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.143289 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.143801 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2f8a57fc-bb9f-42ac-b31f-d557170ed865","Type":"ContainerDied","Data":"187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177"} Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.144108 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2f8a57fc-bb9f-42ac-b31f-d557170ed865","Type":"ContainerDied","Data":"7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04"} Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.144182 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"2f8a57fc-bb9f-42ac-b31f-d557170ed865","Type":"ContainerDied","Data":"8118477a621f802d3c3ce32c6fcf46ecb5113e4c4fb6fcd45de4f6bc54756d6c"} Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.152831 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2f8a57fc-bb9f-42ac-b31f-d557170ed865" (UID: "2f8a57fc-bb9f-42ac-b31f-d557170ed865"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.168740 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-config-data" (OuterVolumeSpecName: "config-data") pod "2f8a57fc-bb9f-42ac-b31f-d557170ed865" (UID: "2f8a57fc-bb9f-42ac-b31f-d557170ed865"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.175250 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.175281 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4s4jq\" (UniqueName: \"kubernetes.io/projected/2f8a57fc-bb9f-42ac-b31f-d557170ed865-kube-api-access-4s4jq\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.175293 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f8a57fc-bb9f-42ac-b31f-d557170ed865-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.175302 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.175312 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.194360 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 06:13:50 crc kubenswrapper[4742]: E1205 06:13:50.194819 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" containerName="init" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.194839 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" containerName="init" Dec 05 06:13:50 crc kubenswrapper[4742]: E1205 06:13:50.194856 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a998c383-44cc-4b30-a27b-57860fdd3353" containerName="nova-cell1-conductor-db-sync" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.194863 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a998c383-44cc-4b30-a27b-57860fdd3353" containerName="nova-cell1-conductor-db-sync" Dec 05 06:13:50 crc kubenswrapper[4742]: E1205 06:13:50.194880 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f8a57fc-bb9f-42ac-b31f-d557170ed865" containerName="nova-metadata-metadata" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.194900 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f8a57fc-bb9f-42ac-b31f-d557170ed865" containerName="nova-metadata-metadata" Dec 05 06:13:50 crc kubenswrapper[4742]: E1205 06:13:50.194926 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29e2d29f-d9a8-4c42-b79e-4b287ec09187" containerName="nova-manage" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.194935 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="29e2d29f-d9a8-4c42-b79e-4b287ec09187" containerName="nova-manage" Dec 05 06:13:50 crc kubenswrapper[4742]: E1205 06:13:50.194942 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" containerName="dnsmasq-dns" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.194948 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" containerName="dnsmasq-dns" Dec 05 06:13:50 crc 
kubenswrapper[4742]: E1205 06:13:50.194963 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f8a57fc-bb9f-42ac-b31f-d557170ed865" containerName="nova-metadata-log" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.194969 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f8a57fc-bb9f-42ac-b31f-d557170ed865" containerName="nova-metadata-log" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.195198 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a998c383-44cc-4b30-a27b-57860fdd3353" containerName="nova-cell1-conductor-db-sync" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.195218 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f8a57fc-bb9f-42ac-b31f-d557170ed865" containerName="nova-metadata-log" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.195232 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" containerName="dnsmasq-dns" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.195246 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="29e2d29f-d9a8-4c42-b79e-4b287ec09187" containerName="nova-manage" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.195265 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f8a57fc-bb9f-42ac-b31f-d557170ed865" containerName="nova-metadata-metadata" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.195877 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.202098 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.204900 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.218215 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "2f8a57fc-bb9f-42ac-b31f-d557170ed865" (UID: "2f8a57fc-bb9f-42ac-b31f-d557170ed865"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.229206 4742 scope.go:117] "RemoveContainer" containerID="4af5cdeba3084a95fef3915a0cce0da54f6dbf9824935a0915a46081f62f7ba4" Dec 05 06:13:50 crc kubenswrapper[4742]: E1205 06:13:50.230450 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4af5cdeba3084a95fef3915a0cce0da54f6dbf9824935a0915a46081f62f7ba4\": container with ID starting with 4af5cdeba3084a95fef3915a0cce0da54f6dbf9824935a0915a46081f62f7ba4 not found: ID does not exist" containerID="4af5cdeba3084a95fef3915a0cce0da54f6dbf9824935a0915a46081f62f7ba4" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.230479 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4af5cdeba3084a95fef3915a0cce0da54f6dbf9824935a0915a46081f62f7ba4"} err="failed to get container status \"4af5cdeba3084a95fef3915a0cce0da54f6dbf9824935a0915a46081f62f7ba4\": rpc error: code = NotFound desc = could not find container \"4af5cdeba3084a95fef3915a0cce0da54f6dbf9824935a0915a46081f62f7ba4\": container with ID starting with 4af5cdeba3084a95fef3915a0cce0da54f6dbf9824935a0915a46081f62f7ba4 not found: ID does not exist" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.230498 4742 scope.go:117] "RemoveContainer" containerID="f0c13babb3b0d83117243ce9f5757a86bcd5049dbd2b26f9e68d5e00c8341438" Dec 05 06:13:50 crc kubenswrapper[4742]: E1205 06:13:50.231545 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0c13babb3b0d83117243ce9f5757a86bcd5049dbd2b26f9e68d5e00c8341438\": container with ID starting with f0c13babb3b0d83117243ce9f5757a86bcd5049dbd2b26f9e68d5e00c8341438 not found: ID does not exist" containerID="f0c13babb3b0d83117243ce9f5757a86bcd5049dbd2b26f9e68d5e00c8341438" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.231579 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0c13babb3b0d83117243ce9f5757a86bcd5049dbd2b26f9e68d5e00c8341438"} err="failed to get container status \"f0c13babb3b0d83117243ce9f5757a86bcd5049dbd2b26f9e68d5e00c8341438\": rpc error: code = NotFound desc = could not find container \"f0c13babb3b0d83117243ce9f5757a86bcd5049dbd2b26f9e68d5e00c8341438\": container with ID starting with f0c13babb3b0d83117243ce9f5757a86bcd5049dbd2b26f9e68d5e00c8341438 not found: ID does not exist" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.231596 4742 scope.go:117] "RemoveContainer" containerID="187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.267903 4742 scope.go:117] "RemoveContainer" containerID="7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.276386 4742 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f8a57fc-bb9f-42ac-b31f-d557170ed865-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.294034 4742 scope.go:117] "RemoveContainer" containerID="187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177" Dec 05 06:13:50 crc kubenswrapper[4742]: E1205 06:13:50.294791 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177\": container with ID starting with 187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177 not found: ID does not exist" containerID="187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.294843 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177"} err="failed to get container status \"187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177\": rpc error: code = NotFound desc = could not find container \"187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177\": container with ID starting with 187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177 not found: ID does not exist" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.294884 4742 scope.go:117] "RemoveContainer" containerID="7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04" Dec 05 06:13:50 crc kubenswrapper[4742]: E1205 06:13:50.297298 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04\": container with ID starting with 7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04 not found: ID does not exist" containerID="7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.297346 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04"} err="failed to get container status \"7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04\": rpc error: code = NotFound desc = could not find container \"7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04\": container with ID starting with 7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04 not found: ID does not exist" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.297373 4742 scope.go:117] "RemoveContainer" containerID="187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.297907 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177"} err="failed to get container status \"187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177\": rpc error: code = NotFound desc = could not find container \"187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177\": container with ID starting with 187b912c569b7210832213b6e50f9d4799f7efcb64a8d2e0d7dd58ffcca34177 not found: ID does not exist" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.297939 4742 scope.go:117] "RemoveContainer" containerID="7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04" Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.298197 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04"} err="failed to get container status \"7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04\": rpc error: code = NotFound desc = could not find container \"7f6b212b06d0ead992b4a5963ff3a62fcae5188359374781672b493588569d04\": container with ID starting with 
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.377638 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2k29j\" (UniqueName: \"kubernetes.io/projected/a1b7e898-ff4e-4523-8602-18d5937c3e5f-kube-api-access-2k29j\") pod \"nova-cell1-conductor-0\" (UID: \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.377764 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1b7e898-ff4e-4523-8602-18d5937c3e5f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.377802 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b7e898-ff4e-4523-8602-18d5937c3e5f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.433928 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-46pxh"]
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.440697 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-46pxh"]
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.470976 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.479080 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1b7e898-ff4e-4523-8602-18d5937c3e5f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.479144 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b7e898-ff4e-4523-8602-18d5937c3e5f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.479238 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2k29j\" (UniqueName: \"kubernetes.io/projected/a1b7e898-ff4e-4523-8602-18d5937c3e5f-kube-api-access-2k29j\") pod \"nova-cell1-conductor-0\" (UID: \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.485791 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.487922 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1b7e898-ff4e-4523-8602-18d5937c3e5f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.488607 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b7e898-ff4e-4523-8602-18d5937c3e5f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.493567 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.495541 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.499382 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.499558 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.505818 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.511606 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2k29j\" (UniqueName: \"kubernetes.io/projected/a1b7e898-ff4e-4523-8602-18d5937c3e5f-kube-api-access-2k29j\") pod \"nova-cell1-conductor-0\" (UID: \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.544816 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.683374 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.684115 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zr7m\" (UniqueName: \"kubernetes.io/projected/5ce4fb02-31bd-4877-8d52-bad17bc4306d-kube-api-access-6zr7m\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.684175 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.684271 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-config-data\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.684332 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ce4fb02-31bd-4877-8d52-bad17bc4306d-logs\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.786480 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-config-data\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.786533 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ce4fb02-31bd-4877-8d52-bad17bc4306d-logs\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.786585 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.786647 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zr7m\" (UniqueName: \"kubernetes.io/projected/5ce4fb02-31bd-4877-8d52-bad17bc4306d-kube-api-access-6zr7m\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.786676 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.787319 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ce4fb02-31bd-4877-8d52-bad17bc4306d-logs\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.791011 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.791130 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.791852 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-config-data\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.802674 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zr7m\" (UniqueName: \"kubernetes.io/projected/5ce4fb02-31bd-4877-8d52-bad17bc4306d-kube-api-access-6zr7m\") pod \"nova-metadata-0\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " pod="openstack/nova-metadata-0"
Dec 05 06:13:50 crc kubenswrapper[4742]: I1205 06:13:50.984911 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 06:13:51 crc kubenswrapper[4742]: I1205 06:13:51.037356 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 05 06:13:51 crc kubenswrapper[4742]: W1205 06:13:51.050218 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1b7e898_ff4e_4523_8602_18d5937c3e5f.slice/crio-88478d3ec4837dfb5acf43c989b2eebe0430c67c0ea3d138798bc84335221a76 WatchSource:0}: Error finding container 88478d3ec4837dfb5acf43c989b2eebe0430c67c0ea3d138798bc84335221a76: Status 404 returned error can't find the container with id 88478d3ec4837dfb5acf43c989b2eebe0430c67c0ea3d138798bc84335221a76
Dec 05 06:13:51 crc kubenswrapper[4742]: I1205 06:13:51.159102 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a1b7e898-ff4e-4523-8602-18d5937c3e5f","Type":"ContainerStarted","Data":"88478d3ec4837dfb5acf43c989b2eebe0430c67c0ea3d138798bc84335221a76"}
Dec 05 06:13:51 crc kubenswrapper[4742]: I1205 06:13:51.164829 4742 generic.go:334] "Generic (PLEG): container finished" podID="f888767f-2f9d-4514-805d-03de7aa50110" containerID="4064bfec7b7a22ff0dc6d89181766eff0375f0fa5f51c04e27c079da039a03be" exitCode=143
Dec 05 06:13:51 crc kubenswrapper[4742]: I1205 06:13:51.164896 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f888767f-2f9d-4514-805d-03de7aa50110","Type":"ContainerDied","Data":"4064bfec7b7a22ff0dc6d89181766eff0375f0fa5f51c04e27c079da039a03be"}
Dec 05 06:13:51 crc kubenswrapper[4742]: I1205 06:13:51.165026 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d4e8c4f7-a7ad-4b52-a835-2dc38deb0020" containerName="nova-scheduler-scheduler" containerID="cri-o://5e1631c3bb7fc93b7035df6f05e8a76a65e7e94a83bafa1d1089a76287823bf8" gracePeriod=30
Dec 05 06:13:51 crc kubenswrapper[4742]: I1205 06:13:51.463620 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 06:13:51 crc kubenswrapper[4742]: W1205 06:13:51.465243 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ce4fb02_31bd_4877_8d52_bad17bc4306d.slice/crio-d135163237b8d5d6a27dd4329967bbc24a91ffa614592a9818a291114947ea8a WatchSource:0}: Error finding container d135163237b8d5d6a27dd4329967bbc24a91ffa614592a9818a291114947ea8a: Status 404 returned error can't find the container with id d135163237b8d5d6a27dd4329967bbc24a91ffa614592a9818a291114947ea8a
Dec 05 06:13:52 crc kubenswrapper[4742]: I1205 06:13:52.023653 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 05 06:13:52 crc kubenswrapper[4742]: I1205 06:13:52.179547 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a1b7e898-ff4e-4523-8602-18d5937c3e5f","Type":"ContainerStarted","Data":"37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e"}
Dec 05 06:13:52 crc kubenswrapper[4742]: I1205 06:13:52.179997 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Dec 05 06:13:52 crc kubenswrapper[4742]: I1205 06:13:52.182326 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5ce4fb02-31bd-4877-8d52-bad17bc4306d","Type":"ContainerStarted","Data":"6d12cb692f32a51c0a1e6bbc3a8ec059f969495d4fb0f12114196ffb3d5c92c2"}
"SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5ce4fb02-31bd-4877-8d52-bad17bc4306d","Type":"ContainerStarted","Data":"6d12cb692f32a51c0a1e6bbc3a8ec059f969495d4fb0f12114196ffb3d5c92c2"} Dec 05 06:13:52 crc kubenswrapper[4742]: I1205 06:13:52.182391 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5ce4fb02-31bd-4877-8d52-bad17bc4306d","Type":"ContainerStarted","Data":"9e47faf85142d7378d64203f2694df0654007f888bcb3decf5297b3d4ac30e97"} Dec 05 06:13:52 crc kubenswrapper[4742]: I1205 06:13:52.182410 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5ce4fb02-31bd-4877-8d52-bad17bc4306d","Type":"ContainerStarted","Data":"d135163237b8d5d6a27dd4329967bbc24a91ffa614592a9818a291114947ea8a"} Dec 05 06:13:52 crc kubenswrapper[4742]: I1205 06:13:52.211001 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.210973555 podStartE2EDuration="2.210973555s" podCreationTimestamp="2025-12-05 06:13:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:13:52.202631675 +0000 UTC m=+1308.114766737" watchObservedRunningTime="2025-12-05 06:13:52.210973555 +0000 UTC m=+1308.123108627" Dec 05 06:13:52 crc kubenswrapper[4742]: I1205 06:13:52.399370 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f8a57fc-bb9f-42ac-b31f-d557170ed865" path="/var/lib/kubelet/pods/2f8a57fc-bb9f-42ac-b31f-d557170ed865/volumes" Dec 05 06:13:52 crc kubenswrapper[4742]: I1205 06:13:52.400184 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0352ff8-ed72-463c-a7b8-3e69e23a3ea2" path="/var/lib/kubelet/pods/d0352ff8-ed72-463c-a7b8-3e69e23a3ea2/volumes" Dec 05 06:13:53 crc kubenswrapper[4742]: E1205 06:13:53.620555 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5e1631c3bb7fc93b7035df6f05e8a76a65e7e94a83bafa1d1089a76287823bf8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 06:13:53 crc kubenswrapper[4742]: E1205 06:13:53.622396 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5e1631c3bb7fc93b7035df6f05e8a76a65e7e94a83bafa1d1089a76287823bf8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 06:13:53 crc kubenswrapper[4742]: E1205 06:13:53.624339 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5e1631c3bb7fc93b7035df6f05e8a76a65e7e94a83bafa1d1089a76287823bf8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 06:13:53 crc kubenswrapper[4742]: E1205 06:13:53.624389 4742 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="d4e8c4f7-a7ad-4b52-a835-2dc38deb0020" containerName="nova-scheduler-scheduler" Dec 05 06:13:54 crc kubenswrapper[4742]: I1205 06:13:54.207013 4742 generic.go:334] "Generic (PLEG): container 
finished" podID="d4e8c4f7-a7ad-4b52-a835-2dc38deb0020" containerID="5e1631c3bb7fc93b7035df6f05e8a76a65e7e94a83bafa1d1089a76287823bf8" exitCode=0 Dec 05 06:13:54 crc kubenswrapper[4742]: I1205 06:13:54.207311 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020","Type":"ContainerDied","Data":"5e1631c3bb7fc93b7035df6f05e8a76a65e7e94a83bafa1d1089a76287823bf8"} Dec 05 06:13:54 crc kubenswrapper[4742]: I1205 06:13:54.338846 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 06:13:54 crc kubenswrapper[4742]: I1205 06:13:54.353798 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=4.3537831350000005 podStartE2EDuration="4.353783135s" podCreationTimestamp="2025-12-05 06:13:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:13:52.234031254 +0000 UTC m=+1308.146166306" watchObservedRunningTime="2025-12-05 06:13:54.353783135 +0000 UTC m=+1310.265918197" Dec 05 06:13:54 crc kubenswrapper[4742]: I1205 06:13:54.468309 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-combined-ca-bundle\") pod \"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\" (UID: \"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\") " Dec 05 06:13:54 crc kubenswrapper[4742]: I1205 06:13:54.468376 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-config-data\") pod \"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\" (UID: \"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\") " Dec 05 06:13:54 crc kubenswrapper[4742]: I1205 06:13:54.468450 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hs97d\" (UniqueName: \"kubernetes.io/projected/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-kube-api-access-hs97d\") pod \"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\" (UID: \"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020\") " Dec 05 06:13:54 crc kubenswrapper[4742]: I1205 06:13:54.473419 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-kube-api-access-hs97d" (OuterVolumeSpecName: "kube-api-access-hs97d") pod "d4e8c4f7-a7ad-4b52-a835-2dc38deb0020" (UID: "d4e8c4f7-a7ad-4b52-a835-2dc38deb0020"). InnerVolumeSpecName "kube-api-access-hs97d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:13:54 crc kubenswrapper[4742]: I1205 06:13:54.498421 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4e8c4f7-a7ad-4b52-a835-2dc38deb0020" (UID: "d4e8c4f7-a7ad-4b52-a835-2dc38deb0020"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:54 crc kubenswrapper[4742]: I1205 06:13:54.522178 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-config-data" (OuterVolumeSpecName: "config-data") pod "d4e8c4f7-a7ad-4b52-a835-2dc38deb0020" (UID: "d4e8c4f7-a7ad-4b52-a835-2dc38deb0020"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:54 crc kubenswrapper[4742]: I1205 06:13:54.570434 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:54 crc kubenswrapper[4742]: I1205 06:13:54.570464 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:54 crc kubenswrapper[4742]: I1205 06:13:54.570474 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hs97d\" (UniqueName: \"kubernetes.io/projected/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020-kube-api-access-hs97d\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.216032 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d4e8c4f7-a7ad-4b52-a835-2dc38deb0020","Type":"ContainerDied","Data":"81e45b9f087bd41ae5b55da27e70844c77d6b3be857311fddbf51333cdfc8710"} Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.216098 4742 scope.go:117] "RemoveContainer" containerID="5e1631c3bb7fc93b7035df6f05e8a76a65e7e94a83bafa1d1089a76287823bf8" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.216195 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.257961 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.275409 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.289953 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:13:55 crc kubenswrapper[4742]: E1205 06:13:55.290539 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4e8c4f7-a7ad-4b52-a835-2dc38deb0020" containerName="nova-scheduler-scheduler" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.290560 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4e8c4f7-a7ad-4b52-a835-2dc38deb0020" containerName="nova-scheduler-scheduler" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.290776 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4e8c4f7-a7ad-4b52-a835-2dc38deb0020" containerName="nova-scheduler-scheduler" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.291538 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.300163 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.300691 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.383001 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5089b534-81b4-4eaf-93d2-bbc137d632d8-config-data\") pod \"nova-scheduler-0\" (UID: \"5089b534-81b4-4eaf-93d2-bbc137d632d8\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.383072 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5089b534-81b4-4eaf-93d2-bbc137d632d8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5089b534-81b4-4eaf-93d2-bbc137d632d8\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.383128 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tt2wx\" (UniqueName: \"kubernetes.io/projected/5089b534-81b4-4eaf-93d2-bbc137d632d8-kube-api-access-tt2wx\") pod \"nova-scheduler-0\" (UID: \"5089b534-81b4-4eaf-93d2-bbc137d632d8\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.484572 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5089b534-81b4-4eaf-93d2-bbc137d632d8-config-data\") pod \"nova-scheduler-0\" (UID: \"5089b534-81b4-4eaf-93d2-bbc137d632d8\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.484647 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5089b534-81b4-4eaf-93d2-bbc137d632d8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5089b534-81b4-4eaf-93d2-bbc137d632d8\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.484709 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tt2wx\" (UniqueName: \"kubernetes.io/projected/5089b534-81b4-4eaf-93d2-bbc137d632d8-kube-api-access-tt2wx\") pod \"nova-scheduler-0\" (UID: \"5089b534-81b4-4eaf-93d2-bbc137d632d8\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.491258 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5089b534-81b4-4eaf-93d2-bbc137d632d8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5089b534-81b4-4eaf-93d2-bbc137d632d8\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.499683 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tt2wx\" (UniqueName: \"kubernetes.io/projected/5089b534-81b4-4eaf-93d2-bbc137d632d8-kube-api-access-tt2wx\") pod \"nova-scheduler-0\" (UID: \"5089b534-81b4-4eaf-93d2-bbc137d632d8\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.506794 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5089b534-81b4-4eaf-93d2-bbc137d632d8-config-data\") pod \"nova-scheduler-0\" (UID: \"5089b534-81b4-4eaf-93d2-bbc137d632d8\") " pod="openstack/nova-scheduler-0" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.621806 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.772986 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.773494 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="b062c9ad-05c3-4a95-a880-e3b7ccfff3de" containerName="kube-state-metrics" containerID="cri-o://148e03b7513c06d95dcc0aa7eee3cfaf868afafc063c6ea1ceacb1994d13b688" gracePeriod=30 Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.985754 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 06:13:55 crc kubenswrapper[4742]: I1205 06:13:55.986654 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.064230 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: W1205 06:13:56.151804 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5089b534_81b4_4eaf_93d2_bbc137d632d8.slice/crio-9e16c67977a836d8255184bc19e072e667aa04b11bf78411619901507a8d5ac8 WatchSource:0}: Error finding container 9e16c67977a836d8255184bc19e072e667aa04b11bf78411619901507a8d5ac8: Status 404 returned error can't find the container with id 9e16c67977a836d8255184bc19e072e667aa04b11bf78411619901507a8d5ac8 Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.158228 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.214647 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f888767f-2f9d-4514-805d-03de7aa50110-config-data\") pod \"f888767f-2f9d-4514-805d-03de7aa50110\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.214767 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xbq7\" (UniqueName: \"kubernetes.io/projected/f888767f-2f9d-4514-805d-03de7aa50110-kube-api-access-6xbq7\") pod \"f888767f-2f9d-4514-805d-03de7aa50110\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.214844 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f888767f-2f9d-4514-805d-03de7aa50110-logs\") pod \"f888767f-2f9d-4514-805d-03de7aa50110\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.214898 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f888767f-2f9d-4514-805d-03de7aa50110-combined-ca-bundle\") pod \"f888767f-2f9d-4514-805d-03de7aa50110\" (UID: \"f888767f-2f9d-4514-805d-03de7aa50110\") " Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.215481 4742 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f888767f-2f9d-4514-805d-03de7aa50110-logs" (OuterVolumeSpecName: "logs") pod "f888767f-2f9d-4514-805d-03de7aa50110" (UID: "f888767f-2f9d-4514-805d-03de7aa50110"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.218886 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f888767f-2f9d-4514-805d-03de7aa50110-kube-api-access-6xbq7" (OuterVolumeSpecName: "kube-api-access-6xbq7") pod "f888767f-2f9d-4514-805d-03de7aa50110" (UID: "f888767f-2f9d-4514-805d-03de7aa50110"). InnerVolumeSpecName "kube-api-access-6xbq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.226774 4742 generic.go:334] "Generic (PLEG): container finished" podID="f888767f-2f9d-4514-805d-03de7aa50110" containerID="038b9da41e74aa1b29139a01cebf7e8813d1faac7980048b46e40c9aaa797eb7" exitCode=0 Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.226819 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.226839 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f888767f-2f9d-4514-805d-03de7aa50110","Type":"ContainerDied","Data":"038b9da41e74aa1b29139a01cebf7e8813d1faac7980048b46e40c9aaa797eb7"} Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.226878 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f888767f-2f9d-4514-805d-03de7aa50110","Type":"ContainerDied","Data":"287ced1c6a46812dbfdb3133a4bf8782fba762d94e260569aa1e2cf6c84bd527"} Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.226895 4742 scope.go:117] "RemoveContainer" containerID="038b9da41e74aa1b29139a01cebf7e8813d1faac7980048b46e40c9aaa797eb7" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.234430 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5089b534-81b4-4eaf-93d2-bbc137d632d8","Type":"ContainerStarted","Data":"9e16c67977a836d8255184bc19e072e667aa04b11bf78411619901507a8d5ac8"} Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.235576 4742 generic.go:334] "Generic (PLEG): container finished" podID="b062c9ad-05c3-4a95-a880-e3b7ccfff3de" containerID="148e03b7513c06d95dcc0aa7eee3cfaf868afafc063c6ea1ceacb1994d13b688" exitCode=2 Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.236332 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b062c9ad-05c3-4a95-a880-e3b7ccfff3de","Type":"ContainerDied","Data":"148e03b7513c06d95dcc0aa7eee3cfaf868afafc063c6ea1ceacb1994d13b688"} Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.236380 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"b062c9ad-05c3-4a95-a880-e3b7ccfff3de","Type":"ContainerDied","Data":"e9b74aced60cb56306fb19046c4166b10b4d86166857e8f9003cb54ff5aa8359"} Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.236392 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9b74aced60cb56306fb19046c4166b10b4d86166857e8f9003cb54ff5aa8359" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.249198 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/f888767f-2f9d-4514-805d-03de7aa50110-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f888767f-2f9d-4514-805d-03de7aa50110" (UID: "f888767f-2f9d-4514-805d-03de7aa50110"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.251192 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f888767f-2f9d-4514-805d-03de7aa50110-config-data" (OuterVolumeSpecName: "config-data") pod "f888767f-2f9d-4514-805d-03de7aa50110" (UID: "f888767f-2f9d-4514-805d-03de7aa50110"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.269973 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.274531 4742 scope.go:117] "RemoveContainer" containerID="4064bfec7b7a22ff0dc6d89181766eff0375f0fa5f51c04e27c079da039a03be" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.317433 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f888767f-2f9d-4514-805d-03de7aa50110-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.317501 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xbq7\" (UniqueName: \"kubernetes.io/projected/f888767f-2f9d-4514-805d-03de7aa50110-kube-api-access-6xbq7\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.317513 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f888767f-2f9d-4514-805d-03de7aa50110-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.317523 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f888767f-2f9d-4514-805d-03de7aa50110-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.333103 4742 scope.go:117] "RemoveContainer" containerID="038b9da41e74aa1b29139a01cebf7e8813d1faac7980048b46e40c9aaa797eb7" Dec 05 06:13:56 crc kubenswrapper[4742]: E1205 06:13:56.337471 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"038b9da41e74aa1b29139a01cebf7e8813d1faac7980048b46e40c9aaa797eb7\": container with ID starting with 038b9da41e74aa1b29139a01cebf7e8813d1faac7980048b46e40c9aaa797eb7 not found: ID does not exist" containerID="038b9da41e74aa1b29139a01cebf7e8813d1faac7980048b46e40c9aaa797eb7" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.337505 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"038b9da41e74aa1b29139a01cebf7e8813d1faac7980048b46e40c9aaa797eb7"} err="failed to get container status \"038b9da41e74aa1b29139a01cebf7e8813d1faac7980048b46e40c9aaa797eb7\": rpc error: code = NotFound desc = could not find container \"038b9da41e74aa1b29139a01cebf7e8813d1faac7980048b46e40c9aaa797eb7\": container with ID starting with 038b9da41e74aa1b29139a01cebf7e8813d1faac7980048b46e40c9aaa797eb7 not found: ID does not exist" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.337529 4742 scope.go:117] "RemoveContainer" 
containerID="4064bfec7b7a22ff0dc6d89181766eff0375f0fa5f51c04e27c079da039a03be" Dec 05 06:13:56 crc kubenswrapper[4742]: E1205 06:13:56.337749 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4064bfec7b7a22ff0dc6d89181766eff0375f0fa5f51c04e27c079da039a03be\": container with ID starting with 4064bfec7b7a22ff0dc6d89181766eff0375f0fa5f51c04e27c079da039a03be not found: ID does not exist" containerID="4064bfec7b7a22ff0dc6d89181766eff0375f0fa5f51c04e27c079da039a03be" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.337773 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4064bfec7b7a22ff0dc6d89181766eff0375f0fa5f51c04e27c079da039a03be"} err="failed to get container status \"4064bfec7b7a22ff0dc6d89181766eff0375f0fa5f51c04e27c079da039a03be\": rpc error: code = NotFound desc = could not find container \"4064bfec7b7a22ff0dc6d89181766eff0375f0fa5f51c04e27c079da039a03be\": container with ID starting with 4064bfec7b7a22ff0dc6d89181766eff0375f0fa5f51c04e27c079da039a03be not found: ID does not exist" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.393508 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4e8c4f7-a7ad-4b52-a835-2dc38deb0020" path="/var/lib/kubelet/pods/d4e8c4f7-a7ad-4b52-a835-2dc38deb0020/volumes" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.419029 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgdqp\" (UniqueName: \"kubernetes.io/projected/b062c9ad-05c3-4a95-a880-e3b7ccfff3de-kube-api-access-bgdqp\") pod \"b062c9ad-05c3-4a95-a880-e3b7ccfff3de\" (UID: \"b062c9ad-05c3-4a95-a880-e3b7ccfff3de\") " Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.421988 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b062c9ad-05c3-4a95-a880-e3b7ccfff3de-kube-api-access-bgdqp" (OuterVolumeSpecName: "kube-api-access-bgdqp") pod "b062c9ad-05c3-4a95-a880-e3b7ccfff3de" (UID: "b062c9ad-05c3-4a95-a880-e3b7ccfff3de"). InnerVolumeSpecName "kube-api-access-bgdqp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.521072 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgdqp\" (UniqueName: \"kubernetes.io/projected/b062c9ad-05c3-4a95-a880-e3b7ccfff3de-kube-api-access-bgdqp\") on node \"crc\" DevicePath \"\"" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.547957 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.556877 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.579022 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 06:13:56 crc kubenswrapper[4742]: E1205 06:13:56.579474 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f888767f-2f9d-4514-805d-03de7aa50110" containerName="nova-api-log" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.579492 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f888767f-2f9d-4514-805d-03de7aa50110" containerName="nova-api-log" Dec 05 06:13:56 crc kubenswrapper[4742]: E1205 06:13:56.579501 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f888767f-2f9d-4514-805d-03de7aa50110" containerName="nova-api-api" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.579509 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f888767f-2f9d-4514-805d-03de7aa50110" containerName="nova-api-api" Dec 05 06:13:56 crc kubenswrapper[4742]: E1205 06:13:56.579519 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b062c9ad-05c3-4a95-a880-e3b7ccfff3de" containerName="kube-state-metrics" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.579527 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="b062c9ad-05c3-4a95-a880-e3b7ccfff3de" containerName="kube-state-metrics" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.579686 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f888767f-2f9d-4514-805d-03de7aa50110" containerName="nova-api-api" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.579703 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f888767f-2f9d-4514-805d-03de7aa50110" containerName="nova-api-log" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.579718 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="b062c9ad-05c3-4a95-a880-e3b7ccfff3de" containerName="kube-state-metrics" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.580692 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.582616 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.611681 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.727164 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1987769-2798-4fb2-a360-ba285b3b73a0-logs\") pod \"nova-api-0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") " pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.727317 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-485kt\" (UniqueName: \"kubernetes.io/projected/e1987769-2798-4fb2-a360-ba285b3b73a0-kube-api-access-485kt\") pod \"nova-api-0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") " pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.727355 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1987769-2798-4fb2-a360-ba285b3b73a0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") " pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.727441 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1987769-2798-4fb2-a360-ba285b3b73a0-config-data\") pod \"nova-api-0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") " pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.833381 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1987769-2798-4fb2-a360-ba285b3b73a0-logs\") pod \"nova-api-0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") " pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.833662 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-485kt\" (UniqueName: \"kubernetes.io/projected/e1987769-2798-4fb2-a360-ba285b3b73a0-kube-api-access-485kt\") pod \"nova-api-0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") " pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.833695 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1987769-2798-4fb2-a360-ba285b3b73a0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") " pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.833742 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1987769-2798-4fb2-a360-ba285b3b73a0-config-data\") pod \"nova-api-0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") " pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.834876 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1987769-2798-4fb2-a360-ba285b3b73a0-logs\") pod \"nova-api-0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") " 
pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.838182 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1987769-2798-4fb2-a360-ba285b3b73a0-config-data\") pod \"nova-api-0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") " pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.839622 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1987769-2798-4fb2-a360-ba285b3b73a0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") " pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.858623 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-485kt\" (UniqueName: \"kubernetes.io/projected/e1987769-2798-4fb2-a360-ba285b3b73a0-kube-api-access-485kt\") pod \"nova-api-0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") " pod="openstack/nova-api-0" Dec 05 06:13:56 crc kubenswrapper[4742]: I1205 06:13:56.906486 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.246292 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5089b534-81b4-4eaf-93d2-bbc137d632d8","Type":"ContainerStarted","Data":"b7120a8ead8ad06ddd6fc46526565d534ca3904dff3a8f3d55bd54b819e1a84b"} Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.247983 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.269036 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.269019323 podStartE2EDuration="2.269019323s" podCreationTimestamp="2025-12-05 06:13:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:13:57.265218633 +0000 UTC m=+1313.177353695" watchObservedRunningTime="2025-12-05 06:13:57.269019323 +0000 UTC m=+1313.181154395" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.290084 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.301150 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.310246 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.311344 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.313194 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.313331 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.323931 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 06:13:57 crc kubenswrapper[4742]: W1205 06:13:57.411887 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1987769_2798_4fb2_a360_ba285b3b73a0.slice/crio-785a85ee9cc4404741a8cf6df92cf5d1237e1de49749eb3b90e0408408f59cbf WatchSource:0}: Error finding container 785a85ee9cc4404741a8cf6df92cf5d1237e1de49749eb3b90e0408408f59cbf: Status 404 returned error can't find the container with id 785a85ee9cc4404741a8cf6df92cf5d1237e1de49749eb3b90e0408408f59cbf Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.414733 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.442658 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " pod="openstack/kube-state-metrics-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.442740 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " pod="openstack/kube-state-metrics-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.442795 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whz8j\" (UniqueName: \"kubernetes.io/projected/fb4dce96-8228-455b-9edc-37a62af6e732-kube-api-access-whz8j\") pod \"kube-state-metrics-0\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " pod="openstack/kube-state-metrics-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.442851 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " pod="openstack/kube-state-metrics-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.545010 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " pod="openstack/kube-state-metrics-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.545134 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-combined-ca-bundle\") pod 
\"kube-state-metrics-0\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " pod="openstack/kube-state-metrics-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.545214 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whz8j\" (UniqueName: \"kubernetes.io/projected/fb4dce96-8228-455b-9edc-37a62af6e732-kube-api-access-whz8j\") pod \"kube-state-metrics-0\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " pod="openstack/kube-state-metrics-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.545261 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " pod="openstack/kube-state-metrics-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.551755 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " pod="openstack/kube-state-metrics-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.552136 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " pod="openstack/kube-state-metrics-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.552909 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " pod="openstack/kube-state-metrics-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.569900 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whz8j\" (UniqueName: \"kubernetes.io/projected/fb4dce96-8228-455b-9edc-37a62af6e732-kube-api-access-whz8j\") pod \"kube-state-metrics-0\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " pod="openstack/kube-state-metrics-0" Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.591742 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.592089 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="ceilometer-central-agent" containerID="cri-o://523fcbc93993d93ce5d60e372636854a73cff181f5007baa19e27e87705ed6eb" gracePeriod=30 Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.592223 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="proxy-httpd" containerID="cri-o://a87001c923191d4641603442e8ce551a7bd18b6c35ae8575a0dc29a6bb9c9878" gracePeriod=30 Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.592275 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="sg-core" 
containerID="cri-o://0fee4bfffd1c6d9fb847a64ac8cce05cf829a5e628acf6b91b83d94c99ceebd7" gracePeriod=30 Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.592318 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="ceilometer-notification-agent" containerID="cri-o://989ea36fc77480278a9c0c731bc9744ebc9600bda470eec6054fc14e2e0af294" gracePeriod=30 Dec 05 06:13:57 crc kubenswrapper[4742]: I1205 06:13:57.631680 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 06:13:58 crc kubenswrapper[4742]: I1205 06:13:58.068250 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 06:13:58 crc kubenswrapper[4742]: W1205 06:13:58.072522 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb4dce96_8228_455b_9edc_37a62af6e732.slice/crio-885fd16106f99af98697418992ad069dd83cab998ea801688b659e57f0ca4423 WatchSource:0}: Error finding container 885fd16106f99af98697418992ad069dd83cab998ea801688b659e57f0ca4423: Status 404 returned error can't find the container with id 885fd16106f99af98697418992ad069dd83cab998ea801688b659e57f0ca4423 Dec 05 06:13:58 crc kubenswrapper[4742]: I1205 06:13:58.263226 4742 generic.go:334] "Generic (PLEG): container finished" podID="6b5415e6-320d-4957-b6f9-045c625ed624" containerID="a87001c923191d4641603442e8ce551a7bd18b6c35ae8575a0dc29a6bb9c9878" exitCode=0 Dec 05 06:13:58 crc kubenswrapper[4742]: I1205 06:13:58.263614 4742 generic.go:334] "Generic (PLEG): container finished" podID="6b5415e6-320d-4957-b6f9-045c625ed624" containerID="0fee4bfffd1c6d9fb847a64ac8cce05cf829a5e628acf6b91b83d94c99ceebd7" exitCode=2 Dec 05 06:13:58 crc kubenswrapper[4742]: I1205 06:13:58.263634 4742 generic.go:334] "Generic (PLEG): container finished" podID="6b5415e6-320d-4957-b6f9-045c625ed624" containerID="523fcbc93993d93ce5d60e372636854a73cff181f5007baa19e27e87705ed6eb" exitCode=0 Dec 05 06:13:58 crc kubenswrapper[4742]: I1205 06:13:58.263297 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b5415e6-320d-4957-b6f9-045c625ed624","Type":"ContainerDied","Data":"a87001c923191d4641603442e8ce551a7bd18b6c35ae8575a0dc29a6bb9c9878"} Dec 05 06:13:58 crc kubenswrapper[4742]: I1205 06:13:58.263718 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b5415e6-320d-4957-b6f9-045c625ed624","Type":"ContainerDied","Data":"0fee4bfffd1c6d9fb847a64ac8cce05cf829a5e628acf6b91b83d94c99ceebd7"} Dec 05 06:13:58 crc kubenswrapper[4742]: I1205 06:13:58.263729 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b5415e6-320d-4957-b6f9-045c625ed624","Type":"ContainerDied","Data":"523fcbc93993d93ce5d60e372636854a73cff181f5007baa19e27e87705ed6eb"} Dec 05 06:13:58 crc kubenswrapper[4742]: I1205 06:13:58.265876 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e1987769-2798-4fb2-a360-ba285b3b73a0","Type":"ContainerStarted","Data":"470102709ea8135ed0d9784361bb33cd3a33fd0f0bfc58dfeee8b2c5d6d7174c"} Dec 05 06:13:58 crc kubenswrapper[4742]: I1205 06:13:58.265902 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"e1987769-2798-4fb2-a360-ba285b3b73a0","Type":"ContainerStarted","Data":"bc378e75ac678ed1dd7cac3cc0c06d12b7153547b41b423c5cf928fdbf445476"} Dec 05 06:13:58 crc kubenswrapper[4742]: I1205 06:13:58.265912 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e1987769-2798-4fb2-a360-ba285b3b73a0","Type":"ContainerStarted","Data":"785a85ee9cc4404741a8cf6df92cf5d1237e1de49749eb3b90e0408408f59cbf"} Dec 05 06:13:58 crc kubenswrapper[4742]: I1205 06:13:58.267303 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fb4dce96-8228-455b-9edc-37a62af6e732","Type":"ContainerStarted","Data":"885fd16106f99af98697418992ad069dd83cab998ea801688b659e57f0ca4423"} Dec 05 06:13:58 crc kubenswrapper[4742]: I1205 06:13:58.289563 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.289539305 podStartE2EDuration="2.289539305s" podCreationTimestamp="2025-12-05 06:13:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:13:58.280955038 +0000 UTC m=+1314.193090100" watchObservedRunningTime="2025-12-05 06:13:58.289539305 +0000 UTC m=+1314.201674377" Dec 05 06:13:58 crc kubenswrapper[4742]: I1205 06:13:58.395164 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b062c9ad-05c3-4a95-a880-e3b7ccfff3de" path="/var/lib/kubelet/pods/b062c9ad-05c3-4a95-a880-e3b7ccfff3de/volumes" Dec 05 06:13:58 crc kubenswrapper[4742]: I1205 06:13:58.395893 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f888767f-2f9d-4514-805d-03de7aa50110" path="/var/lib/kubelet/pods/f888767f-2f9d-4514-805d-03de7aa50110/volumes" Dec 05 06:13:59 crc kubenswrapper[4742]: I1205 06:13:59.280013 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fb4dce96-8228-455b-9edc-37a62af6e732","Type":"ContainerStarted","Data":"63b79a58b1a6b877064a1e2ed25d6d589d4ca79328f599b17692a53d79df74d6"} Dec 05 06:13:59 crc kubenswrapper[4742]: I1205 06:13:59.303623 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.9391187319999998 podStartE2EDuration="2.303597147s" podCreationTimestamp="2025-12-05 06:13:57 +0000 UTC" firstStartedPulling="2025-12-05 06:13:58.074957694 +0000 UTC m=+1313.987092756" lastFinishedPulling="2025-12-05 06:13:58.439436099 +0000 UTC m=+1314.351571171" observedRunningTime="2025-12-05 06:13:59.296046628 +0000 UTC m=+1315.208181690" watchObservedRunningTime="2025-12-05 06:13:59.303597147 +0000 UTC m=+1315.215732209" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.290706 4742 generic.go:334] "Generic (PLEG): container finished" podID="6b5415e6-320d-4957-b6f9-045c625ed624" containerID="989ea36fc77480278a9c0c731bc9744ebc9600bda470eec6054fc14e2e0af294" exitCode=0 Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.290796 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b5415e6-320d-4957-b6f9-045c625ed624","Type":"ContainerDied","Data":"989ea36fc77480278a9c0c731bc9744ebc9600bda470eec6054fc14e2e0af294"} Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.291100 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.480820 4742 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.587753 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.603458 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b5415e6-320d-4957-b6f9-045c625ed624-run-httpd\") pod \"6b5415e6-320d-4957-b6f9-045c625ed624\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.603609 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-combined-ca-bundle\") pod \"6b5415e6-320d-4957-b6f9-045c625ed624\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.603663 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b5415e6-320d-4957-b6f9-045c625ed624-log-httpd\") pod \"6b5415e6-320d-4957-b6f9-045c625ed624\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.603697 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rcn6\" (UniqueName: \"kubernetes.io/projected/6b5415e6-320d-4957-b6f9-045c625ed624-kube-api-access-6rcn6\") pod \"6b5415e6-320d-4957-b6f9-045c625ed624\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.603723 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-config-data\") pod \"6b5415e6-320d-4957-b6f9-045c625ed624\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.603805 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b5415e6-320d-4957-b6f9-045c625ed624-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6b5415e6-320d-4957-b6f9-045c625ed624" (UID: "6b5415e6-320d-4957-b6f9-045c625ed624"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.603880 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-sg-core-conf-yaml\") pod \"6b5415e6-320d-4957-b6f9-045c625ed624\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.603907 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-scripts\") pod \"6b5415e6-320d-4957-b6f9-045c625ed624\" (UID: \"6b5415e6-320d-4957-b6f9-045c625ed624\") " Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.603996 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b5415e6-320d-4957-b6f9-045c625ed624-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6b5415e6-320d-4957-b6f9-045c625ed624" (UID: "6b5415e6-320d-4957-b6f9-045c625ed624"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.605508 4742 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b5415e6-320d-4957-b6f9-045c625ed624-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.605539 4742 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b5415e6-320d-4957-b6f9-045c625ed624-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.615396 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-scripts" (OuterVolumeSpecName: "scripts") pod "6b5415e6-320d-4957-b6f9-045c625ed624" (UID: "6b5415e6-320d-4957-b6f9-045c625ed624"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.622036 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b5415e6-320d-4957-b6f9-045c625ed624-kube-api-access-6rcn6" (OuterVolumeSpecName: "kube-api-access-6rcn6") pod "6b5415e6-320d-4957-b6f9-045c625ed624" (UID: "6b5415e6-320d-4957-b6f9-045c625ed624"). InnerVolumeSpecName "kube-api-access-6rcn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.626450 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.636985 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6b5415e6-320d-4957-b6f9-045c625ed624" (UID: "6b5415e6-320d-4957-b6f9-045c625ed624"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.674942 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b5415e6-320d-4957-b6f9-045c625ed624" (UID: "6b5415e6-320d-4957-b6f9-045c625ed624"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.708519 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.708554 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rcn6\" (UniqueName: \"kubernetes.io/projected/6b5415e6-320d-4957-b6f9-045c625ed624-kube-api-access-6rcn6\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.708567 4742 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.708577 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.730503 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-config-data" (OuterVolumeSpecName: "config-data") pod "6b5415e6-320d-4957-b6f9-045c625ed624" (UID: "6b5415e6-320d-4957-b6f9-045c625ed624"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.809737 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b5415e6-320d-4957-b6f9-045c625ed624-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.985139 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 06:14:00 crc kubenswrapper[4742]: I1205 06:14:00.985187 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.304318 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b5415e6-320d-4957-b6f9-045c625ed624","Type":"ContainerDied","Data":"a3f28f152564af1817bc17e5d7b6833ced2ffe5149e9b070ae3cb7bc6f569a01"} Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.304390 4742 scope.go:117] "RemoveContainer" containerID="a87001c923191d4641603442e8ce551a7bd18b6c35ae8575a0dc29a6bb9c9878" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.304432 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.327559 4742 scope.go:117] "RemoveContainer" containerID="0fee4bfffd1c6d9fb847a64ac8cce05cf829a5e628acf6b91b83d94c99ceebd7" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.346671 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.351512 4742 scope.go:117] "RemoveContainer" containerID="989ea36fc77480278a9c0c731bc9744ebc9600bda470eec6054fc14e2e0af294" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.357295 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.371073 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:14:01 crc kubenswrapper[4742]: E1205 06:14:01.371467 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="sg-core" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.371484 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="sg-core" Dec 05 06:14:01 crc kubenswrapper[4742]: E1205 06:14:01.371521 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="proxy-httpd" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.371528 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="proxy-httpd" Dec 05 06:14:01 crc kubenswrapper[4742]: E1205 06:14:01.371539 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="ceilometer-notification-agent" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.371545 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="ceilometer-notification-agent" Dec 05 06:14:01 crc kubenswrapper[4742]: E1205 06:14:01.371557 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="ceilometer-central-agent" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.371565 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="ceilometer-central-agent" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.371740 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="ceilometer-notification-agent" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.371750 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="ceilometer-central-agent" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.371763 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="sg-core" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.371772 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" containerName="proxy-httpd" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.373755 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.381078 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.381343 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.385556 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.392604 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.405295 4742 scope.go:117] "RemoveContainer" containerID="523fcbc93993d93ce5d60e372636854a73cff181f5007baa19e27e87705ed6eb" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.523270 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hzg4\" (UniqueName: \"kubernetes.io/projected/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-kube-api-access-4hzg4\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.523340 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-config-data\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.523367 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.523423 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-scripts\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.523439 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-log-httpd\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.523512 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.523552 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-run-httpd\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 
06:14:01.523571 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.625515 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.625588 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-run-httpd\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.625614 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.625647 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hzg4\" (UniqueName: \"kubernetes.io/projected/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-kube-api-access-4hzg4\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.625685 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-config-data\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.625705 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.625751 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-log-httpd\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.625766 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-scripts\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.626721 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-run-httpd\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: 
I1205 06:14:01.627003 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-log-httpd\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.631935 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-scripts\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.632549 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.632678 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-config-data\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.642717 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.643079 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.659773 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hzg4\" (UniqueName: \"kubernetes.io/projected/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-kube-api-access-4hzg4\") pod \"ceilometer-0\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") " pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.709342 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.996200 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 06:14:01 crc kubenswrapper[4742]: I1205 06:14:01.996218 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 06:14:02 crc kubenswrapper[4742]: I1205 06:14:02.162236 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:14:02 crc kubenswrapper[4742]: I1205 06:14:02.313631 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a731c802-f560-4b37-ad63-e8c9d1e8a9a7","Type":"ContainerStarted","Data":"b109e94d3a223b4a640bdea0d4227c250deb7d5326f7a170419bc34d889e6934"} Dec 05 06:14:02 crc kubenswrapper[4742]: I1205 06:14:02.392513 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b5415e6-320d-4957-b6f9-045c625ed624" path="/var/lib/kubelet/pods/6b5415e6-320d-4957-b6f9-045c625ed624/volumes" Dec 05 06:14:03 crc kubenswrapper[4742]: I1205 06:14:03.327841 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a731c802-f560-4b37-ad63-e8c9d1e8a9a7","Type":"ContainerStarted","Data":"803941e74cd728e40bbab2da7297f0c85e53bd820ad503c8c1f9f506c52d7374"} Dec 05 06:14:04 crc kubenswrapper[4742]: I1205 06:14:04.352932 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a731c802-f560-4b37-ad63-e8c9d1e8a9a7","Type":"ContainerStarted","Data":"ec17a8f9f404771e14bbf7f37d442576bace2a25cf8ed54b1e37b4984ec2fe6b"} Dec 05 06:14:05 crc kubenswrapper[4742]: I1205 06:14:05.362735 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a731c802-f560-4b37-ad63-e8c9d1e8a9a7","Type":"ContainerStarted","Data":"80926a38772beb15c7fa254590259084c23bf3a199cee64242067a26d5d26980"} Dec 05 06:14:05 crc kubenswrapper[4742]: I1205 06:14:05.622913 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 06:14:05 crc kubenswrapper[4742]: I1205 06:14:05.661834 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 06:14:06 crc kubenswrapper[4742]: I1205 06:14:06.378485 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a731c802-f560-4b37-ad63-e8c9d1e8a9a7","Type":"ContainerStarted","Data":"e0ded2405706529b24f387d578fd29830095302ecb7dc63dfe7fd7600f62c9ef"} Dec 05 06:14:06 crc kubenswrapper[4742]: I1205 06:14:06.439866 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 06:14:06 crc kubenswrapper[4742]: I1205 06:14:06.445634 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.899253894 podStartE2EDuration="5.445605351s" podCreationTimestamp="2025-12-05 06:14:01 +0000 UTC" firstStartedPulling="2025-12-05 06:14:02.169381119 +0000 
UTC m=+1318.081516181" lastFinishedPulling="2025-12-05 06:14:05.715732576 +0000 UTC m=+1321.627867638" observedRunningTime="2025-12-05 06:14:06.439427598 +0000 UTC m=+1322.351562710" watchObservedRunningTime="2025-12-05 06:14:06.445605351 +0000 UTC m=+1322.357740453" Dec 05 06:14:06 crc kubenswrapper[4742]: I1205 06:14:06.907681 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 06:14:06 crc kubenswrapper[4742]: I1205 06:14:06.907746 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 06:14:07 crc kubenswrapper[4742]: I1205 06:14:07.386823 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 06:14:07 crc kubenswrapper[4742]: I1205 06:14:07.644430 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 06:14:08 crc kubenswrapper[4742]: I1205 06:14:08.000187 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e1987769-2798-4fb2-a360-ba285b3b73a0" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 06:14:08 crc kubenswrapper[4742]: I1205 06:14:08.000197 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="e1987769-2798-4fb2-a360-ba285b3b73a0" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.191:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 06:14:08 crc kubenswrapper[4742]: I1205 06:14:08.808638 4742 scope.go:117] "RemoveContainer" containerID="2ee8c4f0acc7b780f12096055b1a02fa1a069b63115f36c4ef9190e1909d47b0" Dec 05 06:14:08 crc kubenswrapper[4742]: I1205 06:14:08.833418 4742 scope.go:117] "RemoveContainer" containerID="113080644dc5c27081f011c664020d9bcb0351dc278860fa7ec579c058ff374a" Dec 05 06:14:08 crc kubenswrapper[4742]: I1205 06:14:08.861226 4742 scope.go:117] "RemoveContainer" containerID="e4da84089c7b4290623b1664b94657d09fc904251641f6c9d90301c31eb2208a" Dec 05 06:14:10 crc kubenswrapper[4742]: I1205 06:14:10.991965 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 06:14:10 crc kubenswrapper[4742]: I1205 06:14:10.993612 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 06:14:11 crc kubenswrapper[4742]: I1205 06:14:11.027391 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 06:14:11 crc kubenswrapper[4742]: I1205 06:14:11.431439 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.389697 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.450848 4742 generic.go:334] "Generic (PLEG): container finished" podID="3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28" containerID="5e9198ed7331873e639b7e979d47e18d2c852caadc64a825cf7b82063fb55bbb" exitCode=137 Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.450906 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.450895 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28","Type":"ContainerDied","Data":"5e9198ed7331873e639b7e979d47e18d2c852caadc64a825cf7b82063fb55bbb"} Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.451044 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28","Type":"ContainerDied","Data":"5ae824c6e688ffa0a3fbd738de1c1ae3475f7e82c624f9f061701173a4472c74"} Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.451083 4742 scope.go:117] "RemoveContainer" containerID="5e9198ed7331873e639b7e979d47e18d2c852caadc64a825cf7b82063fb55bbb" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.471665 4742 scope.go:117] "RemoveContainer" containerID="5e9198ed7331873e639b7e979d47e18d2c852caadc64a825cf7b82063fb55bbb" Dec 05 06:14:13 crc kubenswrapper[4742]: E1205 06:14:13.472145 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e9198ed7331873e639b7e979d47e18d2c852caadc64a825cf7b82063fb55bbb\": container with ID starting with 5e9198ed7331873e639b7e979d47e18d2c852caadc64a825cf7b82063fb55bbb not found: ID does not exist" containerID="5e9198ed7331873e639b7e979d47e18d2c852caadc64a825cf7b82063fb55bbb" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.472221 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e9198ed7331873e639b7e979d47e18d2c852caadc64a825cf7b82063fb55bbb"} err="failed to get container status \"5e9198ed7331873e639b7e979d47e18d2c852caadc64a825cf7b82063fb55bbb\": rpc error: code = NotFound desc = could not find container \"5e9198ed7331873e639b7e979d47e18d2c852caadc64a825cf7b82063fb55bbb\": container with ID starting with 5e9198ed7331873e639b7e979d47e18d2c852caadc64a825cf7b82063fb55bbb not found: ID does not exist" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.498445 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfd8q\" (UniqueName: \"kubernetes.io/projected/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-kube-api-access-pfd8q\") pod \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\" (UID: \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\") " Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.498759 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-combined-ca-bundle\") pod \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\" (UID: \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\") " Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.498812 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-config-data\") pod \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\" (UID: \"3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28\") " Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.506579 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-kube-api-access-pfd8q" (OuterVolumeSpecName: "kube-api-access-pfd8q") pod "3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28" (UID: "3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28"). 
InnerVolumeSpecName "kube-api-access-pfd8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.527376 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28" (UID: "3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.527770 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-config-data" (OuterVolumeSpecName: "config-data") pod "3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28" (UID: "3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.603862 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfd8q\" (UniqueName: \"kubernetes.io/projected/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-kube-api-access-pfd8q\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.603907 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.603920 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.803745 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.820442 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.831730 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 06:14:13 crc kubenswrapper[4742]: E1205 06:14:13.832360 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.832397 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.844835 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.846369 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.849723 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.851151 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.854815 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 05 06:14:13 crc kubenswrapper[4742]: I1205 06:14:13.866378 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.012642 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.012751 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.012831 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.012922 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.013187 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78fw7\" (UniqueName: \"kubernetes.io/projected/e07b564a-eb31-4f88-ae69-44cceef519a4-kube-api-access-78fw7\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.114489 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.114568 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 
06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.114619 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.114640 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.114688 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78fw7\" (UniqueName: \"kubernetes.io/projected/e07b564a-eb31-4f88-ae69-44cceef519a4-kube-api-access-78fw7\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.118803 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.119114 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.121474 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.126879 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.133665 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78fw7\" (UniqueName: \"kubernetes.io/projected/e07b564a-eb31-4f88-ae69-44cceef519a4-kube-api-access-78fw7\") pod \"nova-cell1-novncproxy-0\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.205251 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.400564 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28" path="/var/lib/kubelet/pods/3ae101ec-d4f6-4df7-89a2-eaf0d8ca1c28/volumes" Dec 05 06:14:14 crc kubenswrapper[4742]: I1205 06:14:14.768895 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 06:14:15 crc kubenswrapper[4742]: I1205 06:14:15.487883 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e07b564a-eb31-4f88-ae69-44cceef519a4","Type":"ContainerStarted","Data":"e3d277fe25b44b8eabf49e249cf506a11614da370c828683e2d2b01d444716d4"} Dec 05 06:14:15 crc kubenswrapper[4742]: I1205 06:14:15.488292 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e07b564a-eb31-4f88-ae69-44cceef519a4","Type":"ContainerStarted","Data":"ed2e09149f79b267b6ba274d866e9cd4359e49ce566236be2010265253607f75"} Dec 05 06:14:15 crc kubenswrapper[4742]: I1205 06:14:15.509574 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.5095576509999997 podStartE2EDuration="2.509557651s" podCreationTimestamp="2025-12-05 06:14:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:14:15.509373096 +0000 UTC m=+1331.421508168" watchObservedRunningTime="2025-12-05 06:14:15.509557651 +0000 UTC m=+1331.421692713" Dec 05 06:14:16 crc kubenswrapper[4742]: I1205 06:14:16.671119 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:14:16 crc kubenswrapper[4742]: I1205 06:14:16.671212 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:14:16 crc kubenswrapper[4742]: I1205 06:14:16.912006 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 06:14:16 crc kubenswrapper[4742]: I1205 06:14:16.912711 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 06:14:16 crc kubenswrapper[4742]: I1205 06:14:16.913530 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 06:14:16 crc kubenswrapper[4742]: I1205 06:14:16.916532 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.511508 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.517565 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.739029 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-kb752"] Dec 05 
06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.741682 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.777938 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-kb752"]
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.893263 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.893381 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-config\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.893447 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.893482 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97db6\" (UniqueName: \"kubernetes.io/projected/8485e5ca-5372-441f-9e02-3df086991b2c-kube-api-access-97db6\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.893525 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.893587 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.995153 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-config\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.995228 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.995260 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97db6\" (UniqueName: \"kubernetes.io/projected/8485e5ca-5372-441f-9e02-3df086991b2c-kube-api-access-97db6\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.995296 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.995341 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.995406 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.996619 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.996640 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-config\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.996657 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.996991 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:17 crc kubenswrapper[4742]: I1205 06:14:17.997140 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:18 crc kubenswrapper[4742]: I1205 06:14:18.020366 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97db6\" (UniqueName: \"kubernetes.io/projected/8485e5ca-5372-441f-9e02-3df086991b2c-kube-api-access-97db6\") pod \"dnsmasq-dns-cd5cbd7b9-kb752\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:18 crc kubenswrapper[4742]: I1205 06:14:18.098194 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:18 crc kubenswrapper[4742]: I1205 06:14:18.573675 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-kb752"]
Dec 05 06:14:19 crc kubenswrapper[4742]: I1205 06:14:19.206171 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 06:14:19 crc kubenswrapper[4742]: I1205 06:14:19.533684 4742 generic.go:334] "Generic (PLEG): container finished" podID="8485e5ca-5372-441f-9e02-3df086991b2c" containerID="ee00b1fde374119673d9ac5f72a08628b75db2b614f020d61712b793da9557eb" exitCode=0
Dec 05 06:14:19 crc kubenswrapper[4742]: I1205 06:14:19.533797 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752" event={"ID":"8485e5ca-5372-441f-9e02-3df086991b2c","Type":"ContainerDied","Data":"ee00b1fde374119673d9ac5f72a08628b75db2b614f020d61712b793da9557eb"}
Dec 05 06:14:19 crc kubenswrapper[4742]: I1205 06:14:19.533853 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752" event={"ID":"8485e5ca-5372-441f-9e02-3df086991b2c","Type":"ContainerStarted","Data":"7d3c13ea158c40cc1383e521e2ecf77c4f7b88c285b0644cf06dac0450540cee"}
Dec 05 06:14:19 crc kubenswrapper[4742]: I1205 06:14:19.646103 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 06:14:19 crc kubenswrapper[4742]: I1205 06:14:19.646412 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="ceilometer-central-agent" containerID="cri-o://803941e74cd728e40bbab2da7297f0c85e53bd820ad503c8c1f9f506c52d7374" gracePeriod=30
Dec 05 06:14:19 crc kubenswrapper[4742]: I1205 06:14:19.648447 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="proxy-httpd" containerID="cri-o://e0ded2405706529b24f387d578fd29830095302ecb7dc63dfe7fd7600f62c9ef" gracePeriod=30
Dec 05 06:14:19 crc kubenswrapper[4742]: I1205 06:14:19.648562 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="sg-core" containerID="cri-o://80926a38772beb15c7fa254590259084c23bf3a199cee64242067a26d5d26980" gracePeriod=30
Dec 05 06:14:19 crc kubenswrapper[4742]: I1205 06:14:19.648618 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="ceilometer-notification-agent" containerID="cri-o://ec17a8f9f404771e14bbf7f37d442576bace2a25cf8ed54b1e37b4984ec2fe6b" gracePeriod=30
Dec 05 06:14:19 crc kubenswrapper[4742]: I1205 06:14:19.660682 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.193:3000/\": EOF"
Dec 05 06:14:20 crc kubenswrapper[4742]: I1205 06:14:20.273342 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 06:14:20 crc kubenswrapper[4742]: I1205 06:14:20.546599 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752" event={"ID":"8485e5ca-5372-441f-9e02-3df086991b2c","Type":"ContainerStarted","Data":"3aa39b731afc81b18e6dc9bcdfc9e62825bea59d828add91802d92234f28b7ac"}
Dec 05 06:14:20 crc kubenswrapper[4742]: I1205 06:14:20.547001 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:14:20 crc kubenswrapper[4742]: I1205 06:14:20.548885 4742 generic.go:334] "Generic (PLEG): container finished" podID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerID="e0ded2405706529b24f387d578fd29830095302ecb7dc63dfe7fd7600f62c9ef" exitCode=0
Dec 05 06:14:20 crc kubenswrapper[4742]: I1205 06:14:20.548912 4742 generic.go:334] "Generic (PLEG): container finished" podID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerID="80926a38772beb15c7fa254590259084c23bf3a199cee64242067a26d5d26980" exitCode=2
Dec 05 06:14:20 crc kubenswrapper[4742]: I1205 06:14:20.548923 4742 generic.go:334] "Generic (PLEG): container finished" podID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerID="803941e74cd728e40bbab2da7297f0c85e53bd820ad503c8c1f9f506c52d7374" exitCode=0
Dec 05 06:14:20 crc kubenswrapper[4742]: I1205 06:14:20.548968 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a731c802-f560-4b37-ad63-e8c9d1e8a9a7","Type":"ContainerDied","Data":"e0ded2405706529b24f387d578fd29830095302ecb7dc63dfe7fd7600f62c9ef"}
Dec 05 06:14:20 crc kubenswrapper[4742]: I1205 06:14:20.549006 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a731c802-f560-4b37-ad63-e8c9d1e8a9a7","Type":"ContainerDied","Data":"80926a38772beb15c7fa254590259084c23bf3a199cee64242067a26d5d26980"}
Dec 05 06:14:20 crc kubenswrapper[4742]: I1205 06:14:20.549024 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a731c802-f560-4b37-ad63-e8c9d1e8a9a7","Type":"ContainerDied","Data":"803941e74cd728e40bbab2da7297f0c85e53bd820ad503c8c1f9f506c52d7374"}
Dec 05 06:14:20 crc kubenswrapper[4742]: I1205 06:14:20.549129 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e1987769-2798-4fb2-a360-ba285b3b73a0" containerName="nova-api-log" containerID="cri-o://bc378e75ac678ed1dd7cac3cc0c06d12b7153547b41b423c5cf928fdbf445476" gracePeriod=30
Dec 05 06:14:20 crc kubenswrapper[4742]: I1205 06:14:20.549263 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="e1987769-2798-4fb2-a360-ba285b3b73a0" containerName="nova-api-api" containerID="cri-o://470102709ea8135ed0d9784361bb33cd3a33fd0f0bfc58dfeee8b2c5d6d7174c" gracePeriod=30
Dec 05 06:14:20 crc kubenswrapper[4742]: I1205 06:14:20.572500 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752" podStartSLOduration=3.572478505 podStartE2EDuration="3.572478505s" podCreationTimestamp="2025-12-05 06:14:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:14:20.568763267 +0000 UTC m=+1336.480898349" watchObservedRunningTime="2025-12-05 06:14:20.572478505 +0000 UTC m=+1336.484613587"
Dec 05 06:14:21 crc kubenswrapper[4742]: I1205 06:14:21.561473 4742 generic.go:334] "Generic (PLEG): container finished" podID="e1987769-2798-4fb2-a360-ba285b3b73a0" containerID="bc378e75ac678ed1dd7cac3cc0c06d12b7153547b41b423c5cf928fdbf445476" exitCode=143
Dec 05 06:14:21 crc kubenswrapper[4742]: I1205 06:14:21.561564 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e1987769-2798-4fb2-a360-ba285b3b73a0","Type":"ContainerDied","Data":"bc378e75ac678ed1dd7cac3cc0c06d12b7153547b41b423c5cf928fdbf445476"}
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.106518 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.206249 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.208555 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1987769-2798-4fb2-a360-ba285b3b73a0-logs\") pod \"e1987769-2798-4fb2-a360-ba285b3b73a0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") "
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.208623 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1987769-2798-4fb2-a360-ba285b3b73a0-config-data\") pod \"e1987769-2798-4fb2-a360-ba285b3b73a0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") "
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.208672 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-485kt\" (UniqueName: \"kubernetes.io/projected/e1987769-2798-4fb2-a360-ba285b3b73a0-kube-api-access-485kt\") pod \"e1987769-2798-4fb2-a360-ba285b3b73a0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") "
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.208715 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1987769-2798-4fb2-a360-ba285b3b73a0-combined-ca-bundle\") pod \"e1987769-2798-4fb2-a360-ba285b3b73a0\" (UID: \"e1987769-2798-4fb2-a360-ba285b3b73a0\") "
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.209048 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1987769-2798-4fb2-a360-ba285b3b73a0-logs" (OuterVolumeSpecName: "logs") pod "e1987769-2798-4fb2-a360-ba285b3b73a0" (UID: "e1987769-2798-4fb2-a360-ba285b3b73a0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.209841 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1987769-2798-4fb2-a360-ba285b3b73a0-logs\") on node \"crc\" DevicePath \"\""
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.216117 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1987769-2798-4fb2-a360-ba285b3b73a0-kube-api-access-485kt" (OuterVolumeSpecName: "kube-api-access-485kt") pod "e1987769-2798-4fb2-a360-ba285b3b73a0" (UID: "e1987769-2798-4fb2-a360-ba285b3b73a0"). InnerVolumeSpecName "kube-api-access-485kt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.237516 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1987769-2798-4fb2-a360-ba285b3b73a0-config-data" (OuterVolumeSpecName: "config-data") pod "e1987769-2798-4fb2-a360-ba285b3b73a0" (UID: "e1987769-2798-4fb2-a360-ba285b3b73a0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.261719 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1987769-2798-4fb2-a360-ba285b3b73a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1987769-2798-4fb2-a360-ba285b3b73a0" (UID: "e1987769-2798-4fb2-a360-ba285b3b73a0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.263910 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.311424 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1987769-2798-4fb2-a360-ba285b3b73a0-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.311451 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-485kt\" (UniqueName: \"kubernetes.io/projected/e1987769-2798-4fb2-a360-ba285b3b73a0-kube-api-access-485kt\") on node \"crc\" DevicePath \"\""
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.311461 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1987769-2798-4fb2-a360-ba285b3b73a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.597092 4742 generic.go:334] "Generic (PLEG): container finished" podID="e1987769-2798-4fb2-a360-ba285b3b73a0" containerID="470102709ea8135ed0d9784361bb33cd3a33fd0f0bfc58dfeee8b2c5d6d7174c" exitCode=0
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.597449 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e1987769-2798-4fb2-a360-ba285b3b73a0","Type":"ContainerDied","Data":"470102709ea8135ed0d9784361bb33cd3a33fd0f0bfc58dfeee8b2c5d6d7174c"}
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.597481 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"e1987769-2798-4fb2-a360-ba285b3b73a0","Type":"ContainerDied","Data":"785a85ee9cc4404741a8cf6df92cf5d1237e1de49749eb3b90e0408408f59cbf"}
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.597499 4742 scope.go:117] "RemoveContainer" containerID="470102709ea8135ed0d9784361bb33cd3a33fd0f0bfc58dfeee8b2c5d6d7174c"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.597691 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.608455 4742 generic.go:334] "Generic (PLEG): container finished" podID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerID="ec17a8f9f404771e14bbf7f37d442576bace2a25cf8ed54b1e37b4984ec2fe6b" exitCode=0
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.609982 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a731c802-f560-4b37-ad63-e8c9d1e8a9a7","Type":"ContainerDied","Data":"ec17a8f9f404771e14bbf7f37d442576bace2a25cf8ed54b1e37b4984ec2fe6b"}
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.647002 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.651809 4742 scope.go:117] "RemoveContainer" containerID="bc378e75ac678ed1dd7cac3cc0c06d12b7153547b41b423c5cf928fdbf445476"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.687020 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.704204 4742 scope.go:117] "RemoveContainer" containerID="470102709ea8135ed0d9784361bb33cd3a33fd0f0bfc58dfeee8b2c5d6d7174c"
Dec 05 06:14:24 crc kubenswrapper[4742]: E1205 06:14:24.704677 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"470102709ea8135ed0d9784361bb33cd3a33fd0f0bfc58dfeee8b2c5d6d7174c\": container with ID starting with 470102709ea8135ed0d9784361bb33cd3a33fd0f0bfc58dfeee8b2c5d6d7174c not found: ID does not exist" containerID="470102709ea8135ed0d9784361bb33cd3a33fd0f0bfc58dfeee8b2c5d6d7174c"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.704702 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"470102709ea8135ed0d9784361bb33cd3a33fd0f0bfc58dfeee8b2c5d6d7174c"} err="failed to get container status \"470102709ea8135ed0d9784361bb33cd3a33fd0f0bfc58dfeee8b2c5d6d7174c\": rpc error: code = NotFound desc = could not find container \"470102709ea8135ed0d9784361bb33cd3a33fd0f0bfc58dfeee8b2c5d6d7174c\": container with ID starting with 470102709ea8135ed0d9784361bb33cd3a33fd0f0bfc58dfeee8b2c5d6d7174c not found: ID does not exist"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.704722 4742 scope.go:117] "RemoveContainer" containerID="bc378e75ac678ed1dd7cac3cc0c06d12b7153547b41b423c5cf928fdbf445476"
Dec 05 06:14:24 crc kubenswrapper[4742]: E1205 06:14:24.704915 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc378e75ac678ed1dd7cac3cc0c06d12b7153547b41b423c5cf928fdbf445476\": container with ID starting with bc378e75ac678ed1dd7cac3cc0c06d12b7153547b41b423c5cf928fdbf445476 not found: ID does not exist" containerID="bc378e75ac678ed1dd7cac3cc0c06d12b7153547b41b423c5cf928fdbf445476"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.704930 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc378e75ac678ed1dd7cac3cc0c06d12b7153547b41b423c5cf928fdbf445476"} err="failed to get container status \"bc378e75ac678ed1dd7cac3cc0c06d12b7153547b41b423c5cf928fdbf445476\": rpc error: code = NotFound desc = could not find container \"bc378e75ac678ed1dd7cac3cc0c06d12b7153547b41b423c5cf928fdbf445476\": container with ID starting with bc378e75ac678ed1dd7cac3cc0c06d12b7153547b41b423c5cf928fdbf445476 not found: ID does not exist"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.707630 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.717721 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 05 06:14:24 crc kubenswrapper[4742]: E1205 06:14:24.718289 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1987769-2798-4fb2-a360-ba285b3b73a0" containerName="nova-api-api"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.718305 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1987769-2798-4fb2-a360-ba285b3b73a0" containerName="nova-api-api"
Dec 05 06:14:24 crc kubenswrapper[4742]: E1205 06:14:24.718339 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1987769-2798-4fb2-a360-ba285b3b73a0" containerName="nova-api-log"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.718346 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1987769-2798-4fb2-a360-ba285b3b73a0" containerName="nova-api-log"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.718567 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1987769-2798-4fb2-a360-ba285b3b73a0" containerName="nova-api-log"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.718599 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1987769-2798-4fb2-a360-ba285b3b73a0" containerName="nova-api-api"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.719924 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.724506 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.724609 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.731178 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.734893 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.839222 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.839290 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-internal-tls-certs\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.839316 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tgql\" (UniqueName: \"kubernetes.io/projected/32d7ba7c-a4a5-4430-9907-4b53fad58d32-kube-api-access-7tgql\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.839381 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d7ba7c-a4a5-4430-9907-4b53fad58d32-logs\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.839398 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-public-tls-certs\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.839428 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-config-data\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.853485 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-wdsf6"]
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.856948 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-wdsf6"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.872345 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.872627 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.885814 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-wdsf6"]
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.941175 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d7ba7c-a4a5-4430-9907-4b53fad58d32-logs\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.941221 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-public-tls-certs\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.941251 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-config-data\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.941330 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.941371 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-internal-tls-certs\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.941406 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tgql\" (UniqueName: \"kubernetes.io/projected/32d7ba7c-a4a5-4430-9907-4b53fad58d32-kube-api-access-7tgql\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.941982 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d7ba7c-a4a5-4430-9907-4b53fad58d32-logs\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.946686 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-internal-tls-certs\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.946970 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.947344 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-config-data\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.961892 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tgql\" (UniqueName: \"kubernetes.io/projected/32d7ba7c-a4a5-4430-9907-4b53fad58d32-kube-api-access-7tgql\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:24 crc kubenswrapper[4742]: I1205 06:14:24.962687 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-public-tls-certs\") pod \"nova-api-0\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " pod="openstack/nova-api-0"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.042760 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncjwk\" (UniqueName: \"kubernetes.io/projected/2e09e595-f2cd-4121-af36-123d55c2f729-kube-api-access-ncjwk\") pod \"nova-cell1-cell-mapping-wdsf6\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " pod="openstack/nova-cell1-cell-mapping-wdsf6"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.043120 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-wdsf6\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " pod="openstack/nova-cell1-cell-mapping-wdsf6"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.043161 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-config-data\") pod \"nova-cell1-cell-mapping-wdsf6\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " pod="openstack/nova-cell1-cell-mapping-wdsf6"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.043189 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-scripts\") pod \"nova-cell1-cell-mapping-wdsf6\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " pod="openstack/nova-cell1-cell-mapping-wdsf6"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.050136 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.066113 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.144663 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-wdsf6\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " pod="openstack/nova-cell1-cell-mapping-wdsf6"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.144726 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-config-data\") pod \"nova-cell1-cell-mapping-wdsf6\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " pod="openstack/nova-cell1-cell-mapping-wdsf6"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.144762 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-scripts\") pod \"nova-cell1-cell-mapping-wdsf6\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " pod="openstack/nova-cell1-cell-mapping-wdsf6"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.144888 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncjwk\" (UniqueName: \"kubernetes.io/projected/2e09e595-f2cd-4121-af36-123d55c2f729-kube-api-access-ncjwk\") pod \"nova-cell1-cell-mapping-wdsf6\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " pod="openstack/nova-cell1-cell-mapping-wdsf6"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.149334 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-config-data\") pod \"nova-cell1-cell-mapping-wdsf6\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " pod="openstack/nova-cell1-cell-mapping-wdsf6"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.151848 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-scripts\") pod \"nova-cell1-cell-mapping-wdsf6\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " pod="openstack/nova-cell1-cell-mapping-wdsf6"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.153432 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-wdsf6\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " pod="openstack/nova-cell1-cell-mapping-wdsf6"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.162774 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncjwk\" (UniqueName: \"kubernetes.io/projected/2e09e595-f2cd-4121-af36-123d55c2f729-kube-api-access-ncjwk\") pod \"nova-cell1-cell-mapping-wdsf6\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " pod="openstack/nova-cell1-cell-mapping-wdsf6"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.196424 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-wdsf6"
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.247880 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-combined-ca-bundle\") pod \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") "
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.247931 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-config-data\") pod \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") "
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.247963 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hzg4\" (UniqueName: \"kubernetes.io/projected/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-kube-api-access-4hzg4\") pod \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") "
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.247994 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-scripts\") pod \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") "
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.248127 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-sg-core-conf-yaml\") pod \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") "
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.248149 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-log-httpd\") pod \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") "
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.248171 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-run-httpd\") pod \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") "
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.248229 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-ceilometer-tls-certs\") pod \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\" (UID: \"a731c802-f560-4b37-ad63-e8c9d1e8a9a7\") "
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.249318 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a731c802-f560-4b37-ad63-e8c9d1e8a9a7" (UID: "a731c802-f560-4b37-ad63-e8c9d1e8a9a7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.250134 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a731c802-f560-4b37-ad63-e8c9d1e8a9a7" (UID: "a731c802-f560-4b37-ad63-e8c9d1e8a9a7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.252063 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-kube-api-access-4hzg4" (OuterVolumeSpecName: "kube-api-access-4hzg4") pod "a731c802-f560-4b37-ad63-e8c9d1e8a9a7" (UID: "a731c802-f560-4b37-ad63-e8c9d1e8a9a7"). InnerVolumeSpecName "kube-api-access-4hzg4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.258965 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-scripts" (OuterVolumeSpecName: "scripts") pod "a731c802-f560-4b37-ad63-e8c9d1e8a9a7" (UID: "a731c802-f560-4b37-ad63-e8c9d1e8a9a7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.301299 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a731c802-f560-4b37-ad63-e8c9d1e8a9a7" (UID: "a731c802-f560-4b37-ad63-e8c9d1e8a9a7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.320954 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "a731c802-f560-4b37-ad63-e8c9d1e8a9a7" (UID: "a731c802-f560-4b37-ad63-e8c9d1e8a9a7"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.350567 4742 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.350799 4742 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.350808 4742 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.350817 4742 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.350825 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hzg4\" (UniqueName: \"kubernetes.io/projected/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-kube-api-access-4hzg4\") on node \"crc\" DevicePath \"\""
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.350835 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.369311 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-config-data" (OuterVolumeSpecName: "config-data") pod "a731c802-f560-4b37-ad63-e8c9d1e8a9a7" (UID: "a731c802-f560-4b37-ad63-e8c9d1e8a9a7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.370207 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a731c802-f560-4b37-ad63-e8c9d1e8a9a7" (UID: "a731c802-f560-4b37-ad63-e8c9d1e8a9a7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.452964 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.453009 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a731c802-f560-4b37-ad63-e8c9d1e8a9a7-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.524178 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.624621 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a731c802-f560-4b37-ad63-e8c9d1e8a9a7","Type":"ContainerDied","Data":"b109e94d3a223b4a640bdea0d4227c250deb7d5326f7a170419bc34d889e6934"} Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.624660 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.624671 4742 scope.go:117] "RemoveContainer" containerID="e0ded2405706529b24f387d578fd29830095302ecb7dc63dfe7fd7600f62c9ef" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.638825 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"32d7ba7c-a4a5-4430-9907-4b53fad58d32","Type":"ContainerStarted","Data":"580772f5501b24ab8ed03a21086ae132c2a83deb74cd725c9040088fb7684bb5"} Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.645905 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-wdsf6"] Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.650316 4742 scope.go:117] "RemoveContainer" containerID="80926a38772beb15c7fa254590259084c23bf3a199cee64242067a26d5d26980" Dec 05 06:14:25 crc kubenswrapper[4742]: W1205 06:14:25.660849 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e09e595_f2cd_4121_af36_123d55c2f729.slice/crio-3f29153f38d2b970584e6c4500a7664f6649658d5ef7a002cd8fd34abf9d38d9 WatchSource:0}: Error finding container 3f29153f38d2b970584e6c4500a7664f6649658d5ef7a002cd8fd34abf9d38d9: Status 404 returned error can't find the container with id 3f29153f38d2b970584e6c4500a7664f6649658d5ef7a002cd8fd34abf9d38d9 Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.667683 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.688290 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.688966 4742 scope.go:117] "RemoveContainer" containerID="ec17a8f9f404771e14bbf7f37d442576bace2a25cf8ed54b1e37b4984ec2fe6b" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.700973 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:14:25 crc kubenswrapper[4742]: E1205 06:14:25.701417 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="proxy-httpd" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.701436 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" 
containerName="proxy-httpd" Dec 05 06:14:25 crc kubenswrapper[4742]: E1205 06:14:25.701462 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="ceilometer-central-agent" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.701469 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="ceilometer-central-agent" Dec 05 06:14:25 crc kubenswrapper[4742]: E1205 06:14:25.701495 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="ceilometer-notification-agent" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.701502 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="ceilometer-notification-agent" Dec 05 06:14:25 crc kubenswrapper[4742]: E1205 06:14:25.701508 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="sg-core" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.701514 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="sg-core" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.701701 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="ceilometer-notification-agent" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.701717 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="proxy-httpd" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.701730 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="sg-core" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.701741 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" containerName="ceilometer-central-agent" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.703375 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.723180 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.724763 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.724813 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.724871 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.731775 4742 scope.go:117] "RemoveContainer" containerID="803941e74cd728e40bbab2da7297f0c85e53bd820ad503c8c1f9f506c52d7374" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.860424 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.860490 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.860519 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-config-data\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.860587 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-scripts\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.860654 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-log-httpd\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.860731 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-run-httpd\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.860758 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.860779 4742 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmgd9\" (UniqueName: \"kubernetes.io/projected/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-kube-api-access-lmgd9\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.962567 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-run-httpd\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.962606 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.962635 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmgd9\" (UniqueName: \"kubernetes.io/projected/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-kube-api-access-lmgd9\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.962697 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.962724 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.962741 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-config-data\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.962773 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-scripts\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.962822 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-log-httpd\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.963296 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-run-httpd\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 
06:14:25.963615 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-log-httpd\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.967533 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.967748 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.968268 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.968777 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-scripts\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.969333 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-config-data\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:25 crc kubenswrapper[4742]: I1205 06:14:25.983297 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmgd9\" (UniqueName: \"kubernetes.io/projected/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-kube-api-access-lmgd9\") pod \"ceilometer-0\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") " pod="openstack/ceilometer-0" Dec 05 06:14:26 crc kubenswrapper[4742]: I1205 06:14:26.040012 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 06:14:26 crc kubenswrapper[4742]: I1205 06:14:26.402957 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a731c802-f560-4b37-ad63-e8c9d1e8a9a7" path="/var/lib/kubelet/pods/a731c802-f560-4b37-ad63-e8c9d1e8a9a7/volumes" Dec 05 06:14:26 crc kubenswrapper[4742]: I1205 06:14:26.404565 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1987769-2798-4fb2-a360-ba285b3b73a0" path="/var/lib/kubelet/pods/e1987769-2798-4fb2-a360-ba285b3b73a0/volumes" Dec 05 06:14:26 crc kubenswrapper[4742]: I1205 06:14:26.514220 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:14:26 crc kubenswrapper[4742]: W1205 06:14:26.530314 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod20ae73b5_51f4_4bcf_ba9c_c35f566cd07e.slice/crio-db7aa95e95eca89e07a6046673364dfc59b0c8e13e9d25dc044cc9bebbcca098 WatchSource:0}: Error finding container db7aa95e95eca89e07a6046673364dfc59b0c8e13e9d25dc044cc9bebbcca098: Status 404 returned error can't find the container with id db7aa95e95eca89e07a6046673364dfc59b0c8e13e9d25dc044cc9bebbcca098 Dec 05 06:14:26 crc kubenswrapper[4742]: I1205 06:14:26.652744 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"32d7ba7c-a4a5-4430-9907-4b53fad58d32","Type":"ContainerStarted","Data":"f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0"} Dec 05 06:14:26 crc kubenswrapper[4742]: I1205 06:14:26.652787 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"32d7ba7c-a4a5-4430-9907-4b53fad58d32","Type":"ContainerStarted","Data":"40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e"} Dec 05 06:14:26 crc kubenswrapper[4742]: I1205 06:14:26.654447 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-wdsf6" event={"ID":"2e09e595-f2cd-4121-af36-123d55c2f729","Type":"ContainerStarted","Data":"3658c165323def6855dfad073a0557e3f845baf13e8fbe77d9212b3a4c2386aa"} Dec 05 06:14:26 crc kubenswrapper[4742]: I1205 06:14:26.654497 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-wdsf6" event={"ID":"2e09e595-f2cd-4121-af36-123d55c2f729","Type":"ContainerStarted","Data":"3f29153f38d2b970584e6c4500a7664f6649658d5ef7a002cd8fd34abf9d38d9"} Dec 05 06:14:26 crc kubenswrapper[4742]: I1205 06:14:26.656188 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e","Type":"ContainerStarted","Data":"db7aa95e95eca89e07a6046673364dfc59b0c8e13e9d25dc044cc9bebbcca098"} Dec 05 06:14:26 crc kubenswrapper[4742]: I1205 06:14:26.675239 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.675217902 podStartE2EDuration="2.675217902s" podCreationTimestamp="2025-12-05 06:14:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:14:26.673826706 +0000 UTC m=+1342.585961768" watchObservedRunningTime="2025-12-05 06:14:26.675217902 +0000 UTC m=+1342.587352974" Dec 05 06:14:26 crc kubenswrapper[4742]: I1205 06:14:26.694192 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-wdsf6" podStartSLOduration=2.694172242 
podStartE2EDuration="2.694172242s" podCreationTimestamp="2025-12-05 06:14:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:14:26.688946065 +0000 UTC m=+1342.601081147" watchObservedRunningTime="2025-12-05 06:14:26.694172242 +0000 UTC m=+1342.606307314" Dec 05 06:14:27 crc kubenswrapper[4742]: I1205 06:14:27.672411 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e","Type":"ContainerStarted","Data":"68c5adefa894fd4e4d4330c2dd3417e387b076c9891ece3a5627225e0d0c2d97"} Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.100229 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752" Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.187509 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-fp96k"] Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.187873 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bccf8f775-fp96k" podUID="a09bb1ea-b66c-4ae0-9a77-456a95a914b4" containerName="dnsmasq-dns" containerID="cri-o://c11115c3b48323925558468a080d253e78c7eaf8bd95984490a799595a23bae4" gracePeriod=10 Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.704812 4742 generic.go:334] "Generic (PLEG): container finished" podID="a09bb1ea-b66c-4ae0-9a77-456a95a914b4" containerID="c11115c3b48323925558468a080d253e78c7eaf8bd95984490a799595a23bae4" exitCode=0 Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.705154 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-fp96k" event={"ID":"a09bb1ea-b66c-4ae0-9a77-456a95a914b4","Type":"ContainerDied","Data":"c11115c3b48323925558468a080d253e78c7eaf8bd95984490a799595a23bae4"} Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.706707 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e","Type":"ContainerStarted","Data":"a39fff3b53abe8f6e2a0ba533233662c0b7fe156e685ced3f6efd8f2c79f2d9f"} Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.706743 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e","Type":"ContainerStarted","Data":"4c2c76854fdd4144c4cb1d5dac7df46a88805bb483cbfd5411dfc86a014d19ea"} Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.828705 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.922133 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-dns-svc\") pod \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.922265 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-dns-swift-storage-0\") pod \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.922366 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-ovsdbserver-sb\") pod \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.922410 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-ovsdbserver-nb\") pod \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.922450 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-config\") pod \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.925660 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6z46\" (UniqueName: \"kubernetes.io/projected/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-kube-api-access-c6z46\") pod \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\" (UID: \"a09bb1ea-b66c-4ae0-9a77-456a95a914b4\") " Dec 05 06:14:28 crc kubenswrapper[4742]: I1205 06:14:28.932893 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-kube-api-access-c6z46" (OuterVolumeSpecName: "kube-api-access-c6z46") pod "a09bb1ea-b66c-4ae0-9a77-456a95a914b4" (UID: "a09bb1ea-b66c-4ae0-9a77-456a95a914b4"). InnerVolumeSpecName "kube-api-access-c6z46". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.007526 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-config" (OuterVolumeSpecName: "config") pod "a09bb1ea-b66c-4ae0-9a77-456a95a914b4" (UID: "a09bb1ea-b66c-4ae0-9a77-456a95a914b4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.023514 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a09bb1ea-b66c-4ae0-9a77-456a95a914b4" (UID: "a09bb1ea-b66c-4ae0-9a77-456a95a914b4"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.028495 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6z46\" (UniqueName: \"kubernetes.io/projected/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-kube-api-access-c6z46\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.028522 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.028532 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.052554 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a09bb1ea-b66c-4ae0-9a77-456a95a914b4" (UID: "a09bb1ea-b66c-4ae0-9a77-456a95a914b4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.053589 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a09bb1ea-b66c-4ae0-9a77-456a95a914b4" (UID: "a09bb1ea-b66c-4ae0-9a77-456a95a914b4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.060045 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a09bb1ea-b66c-4ae0-9a77-456a95a914b4" (UID: "a09bb1ea-b66c-4ae0-9a77-456a95a914b4"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.130318 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.130361 4742 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.130373 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a09bb1ea-b66c-4ae0-9a77-456a95a914b4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.715431 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-fp96k" event={"ID":"a09bb1ea-b66c-4ae0-9a77-456a95a914b4","Type":"ContainerDied","Data":"04640f16acfaf6c3224629a17de588a5c6c8cd341313ea5cc805d09233db7080"} Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.715752 4742 scope.go:117] "RemoveContainer" containerID="c11115c3b48323925558468a080d253e78c7eaf8bd95984490a799595a23bae4" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.715870 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-fp96k" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.757337 4742 scope.go:117] "RemoveContainer" containerID="4c5e340425dc26b212cea3450f09d341d0737833ec2979f0ec097e0522fa766b" Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.763536 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-fp96k"] Dec 05 06:14:29 crc kubenswrapper[4742]: I1205 06:14:29.774659 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-fp96k"] Dec 05 06:14:30 crc kubenswrapper[4742]: I1205 06:14:30.398047 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a09bb1ea-b66c-4ae0-9a77-456a95a914b4" path="/var/lib/kubelet/pods/a09bb1ea-b66c-4ae0-9a77-456a95a914b4/volumes" Dec 05 06:14:30 crc kubenswrapper[4742]: I1205 06:14:30.743969 4742 generic.go:334] "Generic (PLEG): container finished" podID="2e09e595-f2cd-4121-af36-123d55c2f729" containerID="3658c165323def6855dfad073a0557e3f845baf13e8fbe77d9212b3a4c2386aa" exitCode=0 Dec 05 06:14:30 crc kubenswrapper[4742]: I1205 06:14:30.744044 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-wdsf6" event={"ID":"2e09e595-f2cd-4121-af36-123d55c2f729","Type":"ContainerDied","Data":"3658c165323def6855dfad073a0557e3f845baf13e8fbe77d9212b3a4c2386aa"} Dec 05 06:14:30 crc kubenswrapper[4742]: I1205 06:14:30.756225 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e","Type":"ContainerStarted","Data":"2ecbc57c9c7699122ac359af92d4cd9ed8008de0c52d7893deb3493891be0b73"} Dec 05 06:14:30 crc kubenswrapper[4742]: I1205 06:14:30.757950 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 06:14:30 crc kubenswrapper[4742]: I1205 06:14:30.818739 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.54318658 podStartE2EDuration="5.818713303s" 
podCreationTimestamp="2025-12-05 06:14:25 +0000 UTC" firstStartedPulling="2025-12-05 06:14:26.533279088 +0000 UTC m=+1342.445414150" lastFinishedPulling="2025-12-05 06:14:29.808805811 +0000 UTC m=+1345.720940873" observedRunningTime="2025-12-05 06:14:30.812262073 +0000 UTC m=+1346.724397135" watchObservedRunningTime="2025-12-05 06:14:30.818713303 +0000 UTC m=+1346.730848365" Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.177525 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-wdsf6" Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.287080 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-config-data\") pod \"2e09e595-f2cd-4121-af36-123d55c2f729\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.287401 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-combined-ca-bundle\") pod \"2e09e595-f2cd-4121-af36-123d55c2f729\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.287516 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-scripts\") pod \"2e09e595-f2cd-4121-af36-123d55c2f729\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.287623 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncjwk\" (UniqueName: \"kubernetes.io/projected/2e09e595-f2cd-4121-af36-123d55c2f729-kube-api-access-ncjwk\") pod \"2e09e595-f2cd-4121-af36-123d55c2f729\" (UID: \"2e09e595-f2cd-4121-af36-123d55c2f729\") " Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.300196 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-scripts" (OuterVolumeSpecName: "scripts") pod "2e09e595-f2cd-4121-af36-123d55c2f729" (UID: "2e09e595-f2cd-4121-af36-123d55c2f729"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.300317 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e09e595-f2cd-4121-af36-123d55c2f729-kube-api-access-ncjwk" (OuterVolumeSpecName: "kube-api-access-ncjwk") pod "2e09e595-f2cd-4121-af36-123d55c2f729" (UID: "2e09e595-f2cd-4121-af36-123d55c2f729"). InnerVolumeSpecName "kube-api-access-ncjwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.320242 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-config-data" (OuterVolumeSpecName: "config-data") pod "2e09e595-f2cd-4121-af36-123d55c2f729" (UID: "2e09e595-f2cd-4121-af36-123d55c2f729"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.329433 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e09e595-f2cd-4121-af36-123d55c2f729" (UID: "2e09e595-f2cd-4121-af36-123d55c2f729"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.389476 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncjwk\" (UniqueName: \"kubernetes.io/projected/2e09e595-f2cd-4121-af36-123d55c2f729-kube-api-access-ncjwk\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.389509 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.389519 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.389527 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e09e595-f2cd-4121-af36-123d55c2f729-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.779337 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-wdsf6" event={"ID":"2e09e595-f2cd-4121-af36-123d55c2f729","Type":"ContainerDied","Data":"3f29153f38d2b970584e6c4500a7664f6649658d5ef7a002cd8fd34abf9d38d9"} Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.779402 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f29153f38d2b970584e6c4500a7664f6649658d5ef7a002cd8fd34abf9d38d9" Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.779356 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-wdsf6" Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.975080 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.975466 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="32d7ba7c-a4a5-4430-9907-4b53fad58d32" containerName="nova-api-log" containerID="cri-o://40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e" gracePeriod=30 Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.975606 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="32d7ba7c-a4a5-4430-9907-4b53fad58d32" containerName="nova-api-api" containerID="cri-o://f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0" gracePeriod=30 Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.989944 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:14:32 crc kubenswrapper[4742]: I1205 06:14:32.990355 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="5089b534-81b4-4eaf-93d2-bbc137d632d8" containerName="nova-scheduler-scheduler" containerID="cri-o://b7120a8ead8ad06ddd6fc46526565d534ca3904dff3a8f3d55bd54b819e1a84b" gracePeriod=30 Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.047316 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.047561 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" containerName="nova-metadata-log" containerID="cri-o://9e47faf85142d7378d64203f2694df0654007f888bcb3decf5297b3d4ac30e97" gracePeriod=30 Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.048008 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" containerName="nova-metadata-metadata" containerID="cri-o://6d12cb692f32a51c0a1e6bbc3a8ec059f969495d4fb0f12114196ffb3d5c92c2" gracePeriod=30 Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.651548 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.793354 4742 generic.go:334] "Generic (PLEG): container finished" podID="32d7ba7c-a4a5-4430-9907-4b53fad58d32" containerID="f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0" exitCode=0 Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.793687 4742 generic.go:334] "Generic (PLEG): container finished" podID="32d7ba7c-a4a5-4430-9907-4b53fad58d32" containerID="40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e" exitCode=143 Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.793425 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.793430 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"32d7ba7c-a4a5-4430-9907-4b53fad58d32","Type":"ContainerDied","Data":"f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0"} Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.793811 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"32d7ba7c-a4a5-4430-9907-4b53fad58d32","Type":"ContainerDied","Data":"40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e"} Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.793828 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"32d7ba7c-a4a5-4430-9907-4b53fad58d32","Type":"ContainerDied","Data":"580772f5501b24ab8ed03a21086ae132c2a83deb74cd725c9040088fb7684bb5"} Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.793843 4742 scope.go:117] "RemoveContainer" containerID="f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.796847 4742 generic.go:334] "Generic (PLEG): container finished" podID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" containerID="9e47faf85142d7378d64203f2694df0654007f888bcb3decf5297b3d4ac30e97" exitCode=143 Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.796892 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5ce4fb02-31bd-4877-8d52-bad17bc4306d","Type":"ContainerDied","Data":"9e47faf85142d7378d64203f2694df0654007f888bcb3decf5297b3d4ac30e97"} Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.817918 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-combined-ca-bundle\") pod \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.817971 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-config-data\") pod \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.818040 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tgql\" (UniqueName: \"kubernetes.io/projected/32d7ba7c-a4a5-4430-9907-4b53fad58d32-kube-api-access-7tgql\") pod \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.818085 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-internal-tls-certs\") pod \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.818108 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-public-tls-certs\") pod \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.818191 4742 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d7ba7c-a4a5-4430-9907-4b53fad58d32-logs\") pod \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\" (UID: \"32d7ba7c-a4a5-4430-9907-4b53fad58d32\") " Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.818861 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32d7ba7c-a4a5-4430-9907-4b53fad58d32-logs" (OuterVolumeSpecName: "logs") pod "32d7ba7c-a4a5-4430-9907-4b53fad58d32" (UID: "32d7ba7c-a4a5-4430-9907-4b53fad58d32"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.825146 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32d7ba7c-a4a5-4430-9907-4b53fad58d32-kube-api-access-7tgql" (OuterVolumeSpecName: "kube-api-access-7tgql") pod "32d7ba7c-a4a5-4430-9907-4b53fad58d32" (UID: "32d7ba7c-a4a5-4430-9907-4b53fad58d32"). InnerVolumeSpecName "kube-api-access-7tgql". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.845575 4742 scope.go:117] "RemoveContainer" containerID="40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.857656 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32d7ba7c-a4a5-4430-9907-4b53fad58d32" (UID: "32d7ba7c-a4a5-4430-9907-4b53fad58d32"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.861081 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-config-data" (OuterVolumeSpecName: "config-data") pod "32d7ba7c-a4a5-4430-9907-4b53fad58d32" (UID: "32d7ba7c-a4a5-4430-9907-4b53fad58d32"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.877908 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "32d7ba7c-a4a5-4430-9907-4b53fad58d32" (UID: "32d7ba7c-a4a5-4430-9907-4b53fad58d32"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.884391 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "32d7ba7c-a4a5-4430-9907-4b53fad58d32" (UID: "32d7ba7c-a4a5-4430-9907-4b53fad58d32"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.892016 4742 scope.go:117] "RemoveContainer" containerID="f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0" Dec 05 06:14:33 crc kubenswrapper[4742]: E1205 06:14:33.892528 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0\": container with ID starting with f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0 not found: ID does not exist" containerID="f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.892647 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0"} err="failed to get container status \"f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0\": rpc error: code = NotFound desc = could not find container \"f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0\": container with ID starting with f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0 not found: ID does not exist" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.892761 4742 scope.go:117] "RemoveContainer" containerID="40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e" Dec 05 06:14:33 crc kubenswrapper[4742]: E1205 06:14:33.893181 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e\": container with ID starting with 40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e not found: ID does not exist" containerID="40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.893314 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e"} err="failed to get container status \"40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e\": rpc error: code = NotFound desc = could not find container \"40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e\": container with ID starting with 40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e not found: ID does not exist" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.893400 4742 scope.go:117] "RemoveContainer" containerID="f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.893753 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0"} err="failed to get container status \"f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0\": rpc error: code = NotFound desc = could not find container \"f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0\": container with ID starting with f7e2fe9b204e40dd812b666cfffdaf6e3381c8ea69989aaf53cc849874f9cff0 not found: ID does not exist" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.893807 4742 scope.go:117] "RemoveContainer" containerID="40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.894159 4742 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e"} err="failed to get container status \"40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e\": rpc error: code = NotFound desc = could not find container \"40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e\": container with ID starting with 40d50d9fd93503050c91ec4fc65b8c910f9fde727dc82f4c64af4b33b2c7467e not found: ID does not exist" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.919843 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tgql\" (UniqueName: \"kubernetes.io/projected/32d7ba7c-a4a5-4430-9907-4b53fad58d32-kube-api-access-7tgql\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.919879 4742 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.919893 4742 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.919905 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32d7ba7c-a4a5-4430-9907-4b53fad58d32-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.919920 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:33 crc kubenswrapper[4742]: I1205 06:14:33.919931 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32d7ba7c-a4a5-4430-9907-4b53fad58d32-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.129631 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.144581 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.164434 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 06:14:34 crc kubenswrapper[4742]: E1205 06:14:34.164798 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a09bb1ea-b66c-4ae0-9a77-456a95a914b4" containerName="dnsmasq-dns" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.164816 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a09bb1ea-b66c-4ae0-9a77-456a95a914b4" containerName="dnsmasq-dns" Dec 05 06:14:34 crc kubenswrapper[4742]: E1205 06:14:34.164828 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e09e595-f2cd-4121-af36-123d55c2f729" containerName="nova-manage" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.164835 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e09e595-f2cd-4121-af36-123d55c2f729" containerName="nova-manage" Dec 05 06:14:34 crc kubenswrapper[4742]: E1205 06:14:34.164853 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32d7ba7c-a4a5-4430-9907-4b53fad58d32" containerName="nova-api-log" Dec 05 
06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.164861 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="32d7ba7c-a4a5-4430-9907-4b53fad58d32" containerName="nova-api-log" Dec 05 06:14:34 crc kubenswrapper[4742]: E1205 06:14:34.164871 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32d7ba7c-a4a5-4430-9907-4b53fad58d32" containerName="nova-api-api" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.164876 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="32d7ba7c-a4a5-4430-9907-4b53fad58d32" containerName="nova-api-api" Dec 05 06:14:34 crc kubenswrapper[4742]: E1205 06:14:34.164892 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a09bb1ea-b66c-4ae0-9a77-456a95a914b4" containerName="init" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.164897 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a09bb1ea-b66c-4ae0-9a77-456a95a914b4" containerName="init" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.165064 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a09bb1ea-b66c-4ae0-9a77-456a95a914b4" containerName="dnsmasq-dns" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.165078 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="32d7ba7c-a4a5-4430-9907-4b53fad58d32" containerName="nova-api-api" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.165088 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="32d7ba7c-a4a5-4430-9907-4b53fad58d32" containerName="nova-api-log" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.165098 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e09e595-f2cd-4121-af36-123d55c2f729" containerName="nova-manage" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.165968 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.167705 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.168965 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.175970 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.178367 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.330248 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-config-data\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.330317 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.330366 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.330407 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa702931-d853-4f8b-b0d8-58f5476bb7c2-logs\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.330574 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-public-tls-certs\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.330660 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-744sh\" (UniqueName: \"kubernetes.io/projected/aa702931-d853-4f8b-b0d8-58f5476bb7c2-kube-api-access-744sh\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.406454 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32d7ba7c-a4a5-4430-9907-4b53fad58d32" path="/var/lib/kubelet/pods/32d7ba7c-a4a5-4430-9907-4b53fad58d32/volumes" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.432366 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-public-tls-certs\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc 
kubenswrapper[4742]: I1205 06:14:34.433171 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-744sh\" (UniqueName: \"kubernetes.io/projected/aa702931-d853-4f8b-b0d8-58f5476bb7c2-kube-api-access-744sh\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.433227 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-config-data\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.433245 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.433263 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.433284 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa702931-d853-4f8b-b0d8-58f5476bb7c2-logs\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.433621 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa702931-d853-4f8b-b0d8-58f5476bb7c2-logs\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.440926 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-internal-tls-certs\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.442798 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.445473 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-config-data\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.459881 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-public-tls-certs\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.462268 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-744sh\" (UniqueName: \"kubernetes.io/projected/aa702931-d853-4f8b-b0d8-58f5476bb7c2-kube-api-access-744sh\") pod \"nova-api-0\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.556655 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.823421 4742 generic.go:334] "Generic (PLEG): container finished" podID="5089b534-81b4-4eaf-93d2-bbc137d632d8" containerID="b7120a8ead8ad06ddd6fc46526565d534ca3904dff3a8f3d55bd54b819e1a84b" exitCode=0 Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.823670 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5089b534-81b4-4eaf-93d2-bbc137d632d8","Type":"ContainerDied","Data":"b7120a8ead8ad06ddd6fc46526565d534ca3904dff3a8f3d55bd54b819e1a84b"} Dec 05 06:14:34 crc kubenswrapper[4742]: I1205 06:14:34.870318 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.057986 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tt2wx\" (UniqueName: \"kubernetes.io/projected/5089b534-81b4-4eaf-93d2-bbc137d632d8-kube-api-access-tt2wx\") pod \"5089b534-81b4-4eaf-93d2-bbc137d632d8\" (UID: \"5089b534-81b4-4eaf-93d2-bbc137d632d8\") " Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.058124 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5089b534-81b4-4eaf-93d2-bbc137d632d8-combined-ca-bundle\") pod \"5089b534-81b4-4eaf-93d2-bbc137d632d8\" (UID: \"5089b534-81b4-4eaf-93d2-bbc137d632d8\") " Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.058689 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5089b534-81b4-4eaf-93d2-bbc137d632d8-config-data\") pod \"5089b534-81b4-4eaf-93d2-bbc137d632d8\" (UID: \"5089b534-81b4-4eaf-93d2-bbc137d632d8\") " Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.065105 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5089b534-81b4-4eaf-93d2-bbc137d632d8-kube-api-access-tt2wx" (OuterVolumeSpecName: "kube-api-access-tt2wx") pod "5089b534-81b4-4eaf-93d2-bbc137d632d8" (UID: "5089b534-81b4-4eaf-93d2-bbc137d632d8"). InnerVolumeSpecName "kube-api-access-tt2wx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.087566 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5089b534-81b4-4eaf-93d2-bbc137d632d8-config-data" (OuterVolumeSpecName: "config-data") pod "5089b534-81b4-4eaf-93d2-bbc137d632d8" (UID: "5089b534-81b4-4eaf-93d2-bbc137d632d8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.091220 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5089b534-81b4-4eaf-93d2-bbc137d632d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5089b534-81b4-4eaf-93d2-bbc137d632d8" (UID: "5089b534-81b4-4eaf-93d2-bbc137d632d8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.167380 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tt2wx\" (UniqueName: \"kubernetes.io/projected/5089b534-81b4-4eaf-93d2-bbc137d632d8-kube-api-access-tt2wx\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.167608 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5089b534-81b4-4eaf-93d2-bbc137d632d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.167721 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5089b534-81b4-4eaf-93d2-bbc137d632d8-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:35 crc kubenswrapper[4742]: W1205 06:14:35.204669 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa702931_d853_4f8b_b0d8_58f5476bb7c2.slice/crio-c5adff18ce5b2b510dd2ad7b169557ca839448cbbf73eb1e6c98882439081a32 WatchSource:0}: Error finding container c5adff18ce5b2b510dd2ad7b169557ca839448cbbf73eb1e6c98882439081a32: Status 404 returned error can't find the container with id c5adff18ce5b2b510dd2ad7b169557ca839448cbbf73eb1e6c98882439081a32 Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.206113 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.837382 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5089b534-81b4-4eaf-93d2-bbc137d632d8","Type":"ContainerDied","Data":"9e16c67977a836d8255184bc19e072e667aa04b11bf78411619901507a8d5ac8"} Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.837984 4742 scope.go:117] "RemoveContainer" containerID="b7120a8ead8ad06ddd6fc46526565d534ca3904dff3a8f3d55bd54b819e1a84b" Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.837444 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.844506 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aa702931-d853-4f8b-b0d8-58f5476bb7c2","Type":"ContainerStarted","Data":"1f4b1e5b484c4b109f9165cf542665fc7a93e90318c7b4dd1ddb7da94d8a3032"} Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.844571 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aa702931-d853-4f8b-b0d8-58f5476bb7c2","Type":"ContainerStarted","Data":"69ab6c82edf2ce10d8987c3b5a9194e318538298be58a196bccfa67b05fbaae7"} Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.844592 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aa702931-d853-4f8b-b0d8-58f5476bb7c2","Type":"ContainerStarted","Data":"c5adff18ce5b2b510dd2ad7b169557ca839448cbbf73eb1e6c98882439081a32"} Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.887374 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.88735246 podStartE2EDuration="1.88735246s" podCreationTimestamp="2025-12-05 06:14:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:14:35.872420406 +0000 UTC m=+1351.784555528" watchObservedRunningTime="2025-12-05 06:14:35.88735246 +0000 UTC m=+1351.799487542" Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.918143 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.935810 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.952271 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:14:35 crc kubenswrapper[4742]: E1205 06:14:35.952707 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5089b534-81b4-4eaf-93d2-bbc137d632d8" containerName="nova-scheduler-scheduler" Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.952728 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="5089b534-81b4-4eaf-93d2-bbc137d632d8" containerName="nova-scheduler-scheduler" Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.952950 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="5089b534-81b4-4eaf-93d2-bbc137d632d8" containerName="nova-scheduler-scheduler" Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.953552 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.956296 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:14:35 crc kubenswrapper[4742]: I1205 06:14:35.956475 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.085907 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/931816fd-7570-46ac-b555-368b196b030c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"931816fd-7570-46ac-b555-368b196b030c\") " pod="openstack/nova-scheduler-0" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.086050 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/931816fd-7570-46ac-b555-368b196b030c-config-data\") pod \"nova-scheduler-0\" (UID: \"931816fd-7570-46ac-b555-368b196b030c\") " pod="openstack/nova-scheduler-0" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.086091 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j56l5\" (UniqueName: \"kubernetes.io/projected/931816fd-7570-46ac-b555-368b196b030c-kube-api-access-j56l5\") pod \"nova-scheduler-0\" (UID: \"931816fd-7570-46ac-b555-368b196b030c\") " pod="openstack/nova-scheduler-0" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.187330 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/931816fd-7570-46ac-b555-368b196b030c-config-data\") pod \"nova-scheduler-0\" (UID: \"931816fd-7570-46ac-b555-368b196b030c\") " pod="openstack/nova-scheduler-0" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.187408 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j56l5\" (UniqueName: \"kubernetes.io/projected/931816fd-7570-46ac-b555-368b196b030c-kube-api-access-j56l5\") pod \"nova-scheduler-0\" (UID: \"931816fd-7570-46ac-b555-368b196b030c\") " pod="openstack/nova-scheduler-0" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.187471 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/931816fd-7570-46ac-b555-368b196b030c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"931816fd-7570-46ac-b555-368b196b030c\") " pod="openstack/nova-scheduler-0" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.192949 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/931816fd-7570-46ac-b555-368b196b030c-config-data\") pod \"nova-scheduler-0\" (UID: \"931816fd-7570-46ac-b555-368b196b030c\") " pod="openstack/nova-scheduler-0" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.193431 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/931816fd-7570-46ac-b555-368b196b030c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"931816fd-7570-46ac-b555-368b196b030c\") " pod="openstack/nova-scheduler-0" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.210089 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j56l5\" (UniqueName: 
\"kubernetes.io/projected/931816fd-7570-46ac-b555-368b196b030c-kube-api-access-j56l5\") pod \"nova-scheduler-0\" (UID: \"931816fd-7570-46ac-b555-368b196b030c\") " pod="openstack/nova-scheduler-0" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.239787 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": read tcp 10.217.0.2:44392->10.217.0.189:8775: read: connection reset by peer" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.240181 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": read tcp 10.217.0.2:44398->10.217.0.189:8775: read: connection reset by peer" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.298917 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.396579 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5089b534-81b4-4eaf-93d2-bbc137d632d8" path="/var/lib/kubelet/pods/5089b534-81b4-4eaf-93d2-bbc137d632d8/volumes" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.601238 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.652961 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.798159 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zr7m\" (UniqueName: \"kubernetes.io/projected/5ce4fb02-31bd-4877-8d52-bad17bc4306d-kube-api-access-6zr7m\") pod \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.798538 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ce4fb02-31bd-4877-8d52-bad17bc4306d-logs\") pod \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.798617 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-combined-ca-bundle\") pod \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.798700 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-nova-metadata-tls-certs\") pod \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.798780 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-config-data\") pod \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\" (UID: \"5ce4fb02-31bd-4877-8d52-bad17bc4306d\") " Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.799494 4742 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ce4fb02-31bd-4877-8d52-bad17bc4306d-logs" (OuterVolumeSpecName: "logs") pod "5ce4fb02-31bd-4877-8d52-bad17bc4306d" (UID: "5ce4fb02-31bd-4877-8d52-bad17bc4306d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.805517 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ce4fb02-31bd-4877-8d52-bad17bc4306d-kube-api-access-6zr7m" (OuterVolumeSpecName: "kube-api-access-6zr7m") pod "5ce4fb02-31bd-4877-8d52-bad17bc4306d" (UID: "5ce4fb02-31bd-4877-8d52-bad17bc4306d"). InnerVolumeSpecName "kube-api-access-6zr7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.831741 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-config-data" (OuterVolumeSpecName: "config-data") pod "5ce4fb02-31bd-4877-8d52-bad17bc4306d" (UID: "5ce4fb02-31bd-4877-8d52-bad17bc4306d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.841647 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ce4fb02-31bd-4877-8d52-bad17bc4306d" (UID: "5ce4fb02-31bd-4877-8d52-bad17bc4306d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.857324 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"931816fd-7570-46ac-b555-368b196b030c","Type":"ContainerStarted","Data":"19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee"} Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.857395 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"931816fd-7570-46ac-b555-368b196b030c","Type":"ContainerStarted","Data":"e8e2523c90a163b16a9aee5217ec72bd7e15cbfe79e812a2935d7b6aad45ddfc"} Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.860376 4742 generic.go:334] "Generic (PLEG): container finished" podID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" containerID="6d12cb692f32a51c0a1e6bbc3a8ec059f969495d4fb0f12114196ffb3d5c92c2" exitCode=0 Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.860454 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5ce4fb02-31bd-4877-8d52-bad17bc4306d","Type":"ContainerDied","Data":"6d12cb692f32a51c0a1e6bbc3a8ec059f969495d4fb0f12114196ffb3d5c92c2"} Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.860463 4742 util.go:48] "No ready sandbox for pod can be found. 
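
The two readiness failures a little above ("connection reset by peer" against https://10.217.0.189:8775/) come from the kubelet's HTTP prober, which counts any transport error or non-success status as a probe failure. A self-contained sketch of that check, assuming the 1s timeout of a default probe; skipping certificate verification mirrors how the kubelet probes HTTPS endpoints:

package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

// probe performs one kubelet-style HTTPS check: transport errors (such as
// "connection reset by peer") and non-2xx/3xx statuses are failures.
func probe(url string, timeout time.Duration) error {
	client := &http.Client{
		Timeout: timeout,
		Transport: &http.Transport{
			// The kubelet's HTTP prober does not verify serving certs.
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
	resp, err := client.Get(url)
	if err != nil {
		return err // surfaces as probeResult="failure" with the error as output
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
		return nil
	}
	return fmt.Errorf("unhealthy status: %s", resp.Status)
}

func main() {
	// Endpoint taken from the probe output above; 1s matches a default timeoutSeconds.
	fmt.Println(probe("https://10.217.0.189:8775/", time.Second))
}
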
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.860492 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5ce4fb02-31bd-4877-8d52-bad17bc4306d","Type":"ContainerDied","Data":"d135163237b8d5d6a27dd4329967bbc24a91ffa614592a9818a291114947ea8a"} Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.860509 4742 scope.go:117] "RemoveContainer" containerID="6d12cb692f32a51c0a1e6bbc3a8ec059f969495d4fb0f12114196ffb3d5c92c2" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.864397 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "5ce4fb02-31bd-4877-8d52-bad17bc4306d" (UID: "5ce4fb02-31bd-4877-8d52-bad17bc4306d"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.883602 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.883580162 podStartE2EDuration="1.883580162s" podCreationTimestamp="2025-12-05 06:14:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:14:36.880426009 +0000 UTC m=+1352.792561071" watchObservedRunningTime="2025-12-05 06:14:36.883580162 +0000 UTC m=+1352.795715224" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.892567 4742 scope.go:117] "RemoveContainer" containerID="9e47faf85142d7378d64203f2694df0654007f888bcb3decf5297b3d4ac30e97" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.900576 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.900637 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zr7m\" (UniqueName: \"kubernetes.io/projected/5ce4fb02-31bd-4877-8d52-bad17bc4306d-kube-api-access-6zr7m\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.900690 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ce4fb02-31bd-4877-8d52-bad17bc4306d-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.900797 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.900847 4742 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5ce4fb02-31bd-4877-8d52-bad17bc4306d-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.909828 4742 scope.go:117] "RemoveContainer" containerID="6d12cb692f32a51c0a1e6bbc3a8ec059f969495d4fb0f12114196ffb3d5c92c2" Dec 05 06:14:36 crc kubenswrapper[4742]: E1205 06:14:36.910310 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d12cb692f32a51c0a1e6bbc3a8ec059f969495d4fb0f12114196ffb3d5c92c2\": container with ID starting with 
6d12cb692f32a51c0a1e6bbc3a8ec059f969495d4fb0f12114196ffb3d5c92c2 not found: ID does not exist" containerID="6d12cb692f32a51c0a1e6bbc3a8ec059f969495d4fb0f12114196ffb3d5c92c2" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.910361 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d12cb692f32a51c0a1e6bbc3a8ec059f969495d4fb0f12114196ffb3d5c92c2"} err="failed to get container status \"6d12cb692f32a51c0a1e6bbc3a8ec059f969495d4fb0f12114196ffb3d5c92c2\": rpc error: code = NotFound desc = could not find container \"6d12cb692f32a51c0a1e6bbc3a8ec059f969495d4fb0f12114196ffb3d5c92c2\": container with ID starting with 6d12cb692f32a51c0a1e6bbc3a8ec059f969495d4fb0f12114196ffb3d5c92c2 not found: ID does not exist" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.910388 4742 scope.go:117] "RemoveContainer" containerID="9e47faf85142d7378d64203f2694df0654007f888bcb3decf5297b3d4ac30e97" Dec 05 06:14:36 crc kubenswrapper[4742]: E1205 06:14:36.910739 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e47faf85142d7378d64203f2694df0654007f888bcb3decf5297b3d4ac30e97\": container with ID starting with 9e47faf85142d7378d64203f2694df0654007f888bcb3decf5297b3d4ac30e97 not found: ID does not exist" containerID="9e47faf85142d7378d64203f2694df0654007f888bcb3decf5297b3d4ac30e97" Dec 05 06:14:36 crc kubenswrapper[4742]: I1205 06:14:36.910775 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e47faf85142d7378d64203f2694df0654007f888bcb3decf5297b3d4ac30e97"} err="failed to get container status \"9e47faf85142d7378d64203f2694df0654007f888bcb3decf5297b3d4ac30e97\": rpc error: code = NotFound desc = could not find container \"9e47faf85142d7378d64203f2694df0654007f888bcb3decf5297b3d4ac30e97\": container with ID starting with 9e47faf85142d7378d64203f2694df0654007f888bcb3decf5297b3d4ac30e97 not found: ID does not exist" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.266496 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.295452 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.307678 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:14:37 crc kubenswrapper[4742]: E1205 06:14:37.308210 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" containerName="nova-metadata-log" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.308231 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" containerName="nova-metadata-log" Dec 05 06:14:37 crc kubenswrapper[4742]: E1205 06:14:37.308269 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" containerName="nova-metadata-metadata" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.308278 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" containerName="nova-metadata-metadata" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.308507 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" containerName="nova-metadata-metadata" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.308542 4742 
memory_manager.go:354] "RemoveStaleState removing state" podUID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" containerName="nova-metadata-log" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.309847 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.314393 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.318295 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.334372 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.413209 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7038cd99-8151-4157-93c6-3b7f5b9ce25e-logs\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.413321 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-config-data\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.413393 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pn9h6\" (UniqueName: \"kubernetes.io/projected/7038cd99-8151-4157-93c6-3b7f5b9ce25e-kube-api-access-pn9h6\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.413596 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.413716 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.516248 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7038cd99-8151-4157-93c6-3b7f5b9ce25e-logs\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.516327 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-config-data\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.516360 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-pn9h6\" (UniqueName: \"kubernetes.io/projected/7038cd99-8151-4157-93c6-3b7f5b9ce25e-kube-api-access-pn9h6\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.516439 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.516508 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.516855 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7038cd99-8151-4157-93c6-3b7f5b9ce25e-logs\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.521893 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-config-data\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.522177 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.527322 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.536172 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pn9h6\" (UniqueName: \"kubernetes.io/projected/7038cd99-8151-4157-93c6-3b7f5b9ce25e-kube-api-access-pn9h6\") pod \"nova-metadata-0\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " pod="openstack/nova-metadata-0" Dec 05 06:14:37 crc kubenswrapper[4742]: I1205 06:14:37.631256 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 06:14:38 crc kubenswrapper[4742]: I1205 06:14:38.112866 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:14:38 crc kubenswrapper[4742]: I1205 06:14:38.432441 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ce4fb02-31bd-4877-8d52-bad17bc4306d" path="/var/lib/kubelet/pods/5ce4fb02-31bd-4877-8d52-bad17bc4306d/volumes" Dec 05 06:14:38 crc kubenswrapper[4742]: I1205 06:14:38.882306 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7038cd99-8151-4157-93c6-3b7f5b9ce25e","Type":"ContainerStarted","Data":"4d42993853ddd3815008a6e598dffed4d9fa4416bef732ddaba8c8e33025a533"} Dec 05 06:14:38 crc kubenswrapper[4742]: I1205 06:14:38.882343 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7038cd99-8151-4157-93c6-3b7f5b9ce25e","Type":"ContainerStarted","Data":"5786613a0ea271c49e36cb812feba2d04a12fd32ef5d2c4e0ebfce2f557616b6"} Dec 05 06:14:38 crc kubenswrapper[4742]: I1205 06:14:38.882353 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7038cd99-8151-4157-93c6-3b7f5b9ce25e","Type":"ContainerStarted","Data":"d7387779bc1bd76b8ee3f45aaae09d36d4c74b32b6e2a8f786877661d91ac905"} Dec 05 06:14:38 crc kubenswrapper[4742]: I1205 06:14:38.905121 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.9050966310000002 podStartE2EDuration="1.905096631s" podCreationTimestamp="2025-12-05 06:14:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:14:38.899828412 +0000 UTC m=+1354.811963504" watchObservedRunningTime="2025-12-05 06:14:38.905096631 +0000 UTC m=+1354.817231713" Dec 05 06:14:41 crc kubenswrapper[4742]: I1205 06:14:41.299448 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 06:14:42 crc kubenswrapper[4742]: I1205 06:14:42.632161 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 06:14:42 crc kubenswrapper[4742]: I1205 06:14:42.632525 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 06:14:44 crc kubenswrapper[4742]: I1205 06:14:44.557855 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 06:14:44 crc kubenswrapper[4742]: I1205 06:14:44.558395 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 06:14:45 crc kubenswrapper[4742]: I1205 06:14:45.574336 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="aa702931-d853-4f8b-b0d8-58f5476bb7c2" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 06:14:45 crc kubenswrapper[4742]: I1205 06:14:45.574331 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="aa702931-d853-4f8b-b0d8-58f5476bb7c2" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.199:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 06:14:46 crc kubenswrapper[4742]: I1205 06:14:46.299339 4742 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 06:14:46 crc kubenswrapper[4742]: I1205 06:14:46.346900 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 06:14:46 crc kubenswrapper[4742]: I1205 06:14:46.671126 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:14:46 crc kubenswrapper[4742]: I1205 06:14:46.671199 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:14:47 crc kubenswrapper[4742]: I1205 06:14:47.033312 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 06:14:47 crc kubenswrapper[4742]: I1205 06:14:47.631745 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 06:14:47 crc kubenswrapper[4742]: I1205 06:14:47.633556 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 06:14:48 crc kubenswrapper[4742]: I1205 06:14:48.643388 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 06:14:48 crc kubenswrapper[4742]: I1205 06:14:48.643412 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 06:14:54 crc kubenswrapper[4742]: I1205 06:14:54.564155 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 06:14:54 crc kubenswrapper[4742]: I1205 06:14:54.564923 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 06:14:54 crc kubenswrapper[4742]: I1205 06:14:54.571793 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 06:14:54 crc kubenswrapper[4742]: I1205 06:14:54.571854 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 06:14:55 crc kubenswrapper[4742]: I1205 06:14:55.104453 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 06:14:55 crc kubenswrapper[4742]: I1205 06:14:55.110343 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 06:14:56 crc kubenswrapper[4742]: I1205 06:14:56.052232 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 06:14:57 crc kubenswrapper[4742]: I1205 06:14:57.638334 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack/nova-metadata-0" Dec 05 06:14:57 crc kubenswrapper[4742]: I1205 06:14:57.644768 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 06:14:57 crc kubenswrapper[4742]: I1205 06:14:57.650158 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 06:14:58 crc kubenswrapper[4742]: I1205 06:14:58.145846 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.183828 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj"] Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.185210 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.187766 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.190109 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.201425 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj"] Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.304136 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1a6d05c8-96c6-4ee1-b695-753e5af543bc-config-volume\") pod \"collect-profiles-29415255-4sqxj\" (UID: \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.304191 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdgjv\" (UniqueName: \"kubernetes.io/projected/1a6d05c8-96c6-4ee1-b695-753e5af543bc-kube-api-access-hdgjv\") pod \"collect-profiles-29415255-4sqxj\" (UID: \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.304249 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a6d05c8-96c6-4ee1-b695-753e5af543bc-secret-volume\") pod \"collect-profiles-29415255-4sqxj\" (UID: \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.405876 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a6d05c8-96c6-4ee1-b695-753e5af543bc-secret-volume\") pod \"collect-profiles-29415255-4sqxj\" (UID: \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.406327 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/1a6d05c8-96c6-4ee1-b695-753e5af543bc-config-volume\") pod \"collect-profiles-29415255-4sqxj\" (UID: \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.406358 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdgjv\" (UniqueName: \"kubernetes.io/projected/1a6d05c8-96c6-4ee1-b695-753e5af543bc-kube-api-access-hdgjv\") pod \"collect-profiles-29415255-4sqxj\" (UID: \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.407406 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1a6d05c8-96c6-4ee1-b695-753e5af543bc-config-volume\") pod \"collect-profiles-29415255-4sqxj\" (UID: \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.417836 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a6d05c8-96c6-4ee1-b695-753e5af543bc-secret-volume\") pod \"collect-profiles-29415255-4sqxj\" (UID: \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.423487 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdgjv\" (UniqueName: \"kubernetes.io/projected/1a6d05c8-96c6-4ee1-b695-753e5af543bc-kube-api-access-hdgjv\") pod \"collect-profiles-29415255-4sqxj\" (UID: \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.509643 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" Dec 05 06:15:00 crc kubenswrapper[4742]: I1205 06:15:00.952163 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj"] Dec 05 06:15:01 crc kubenswrapper[4742]: I1205 06:15:01.168758 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" event={"ID":"1a6d05c8-96c6-4ee1-b695-753e5af543bc","Type":"ContainerStarted","Data":"e60559e697c4bcd742a29188dfc9b8629d47208eb15203406bb86012cbf04b6b"} Dec 05 06:15:01 crc kubenswrapper[4742]: I1205 06:15:01.168807 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" event={"ID":"1a6d05c8-96c6-4ee1-b695-753e5af543bc","Type":"ContainerStarted","Data":"d434c719ec600ab860ac06221dbc7c59f5915d6c6d6a7fa933c061975ad06fbc"} Dec 05 06:15:02 crc kubenswrapper[4742]: I1205 06:15:02.185220 4742 generic.go:334] "Generic (PLEG): container finished" podID="1a6d05c8-96c6-4ee1-b695-753e5af543bc" containerID="e60559e697c4bcd742a29188dfc9b8629d47208eb15203406bb86012cbf04b6b" exitCode=0 Dec 05 06:15:02 crc kubenswrapper[4742]: I1205 06:15:02.185283 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" event={"ID":"1a6d05c8-96c6-4ee1-b695-753e5af543bc","Type":"ContainerDied","Data":"e60559e697c4bcd742a29188dfc9b8629d47208eb15203406bb86012cbf04b6b"} Dec 05 06:15:03 crc kubenswrapper[4742]: I1205 06:15:03.654712 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" Dec 05 06:15:03 crc kubenswrapper[4742]: I1205 06:15:03.778681 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1a6d05c8-96c6-4ee1-b695-753e5af543bc-config-volume\") pod \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\" (UID: \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\") " Dec 05 06:15:03 crc kubenswrapper[4742]: I1205 06:15:03.778769 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdgjv\" (UniqueName: \"kubernetes.io/projected/1a6d05c8-96c6-4ee1-b695-753e5af543bc-kube-api-access-hdgjv\") pod \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\" (UID: \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\") " Dec 05 06:15:03 crc kubenswrapper[4742]: I1205 06:15:03.778824 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a6d05c8-96c6-4ee1-b695-753e5af543bc-secret-volume\") pod \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\" (UID: \"1a6d05c8-96c6-4ee1-b695-753e5af543bc\") " Dec 05 06:15:03 crc kubenswrapper[4742]: I1205 06:15:03.779635 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a6d05c8-96c6-4ee1-b695-753e5af543bc-config-volume" (OuterVolumeSpecName: "config-volume") pod "1a6d05c8-96c6-4ee1-b695-753e5af543bc" (UID: "1a6d05c8-96c6-4ee1-b695-753e5af543bc"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:03 crc kubenswrapper[4742]: I1205 06:15:03.784157 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a6d05c8-96c6-4ee1-b695-753e5af543bc-kube-api-access-hdgjv" (OuterVolumeSpecName: "kube-api-access-hdgjv") pod "1a6d05c8-96c6-4ee1-b695-753e5af543bc" (UID: "1a6d05c8-96c6-4ee1-b695-753e5af543bc"). InnerVolumeSpecName "kube-api-access-hdgjv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:03 crc kubenswrapper[4742]: I1205 06:15:03.789412 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a6d05c8-96c6-4ee1-b695-753e5af543bc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1a6d05c8-96c6-4ee1-b695-753e5af543bc" (UID: "1a6d05c8-96c6-4ee1-b695-753e5af543bc"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:03 crc kubenswrapper[4742]: I1205 06:15:03.881401 4742 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1a6d05c8-96c6-4ee1-b695-753e5af543bc-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:03 crc kubenswrapper[4742]: I1205 06:15:03.881438 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdgjv\" (UniqueName: \"kubernetes.io/projected/1a6d05c8-96c6-4ee1-b695-753e5af543bc-kube-api-access-hdgjv\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:03 crc kubenswrapper[4742]: I1205 06:15:03.881449 4742 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1a6d05c8-96c6-4ee1-b695-753e5af543bc-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:04 crc kubenswrapper[4742]: I1205 06:15:04.218360 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" event={"ID":"1a6d05c8-96c6-4ee1-b695-753e5af543bc","Type":"ContainerDied","Data":"d434c719ec600ab860ac06221dbc7c59f5915d6c6d6a7fa933c061975ad06fbc"} Dec 05 06:15:04 crc kubenswrapper[4742]: I1205 06:15:04.218404 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d434c719ec600ab860ac06221dbc7c59f5915d6c6d6a7fa933c061975ad06fbc" Dec 05 06:15:04 crc kubenswrapper[4742]: I1205 06:15:04.218436 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj" Dec 05 06:15:04 crc kubenswrapper[4742]: I1205 06:15:04.911847 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z77c8"] Dec 05 06:15:04 crc kubenswrapper[4742]: E1205 06:15:04.912638 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a6d05c8-96c6-4ee1-b695-753e5af543bc" containerName="collect-profiles" Dec 05 06:15:04 crc kubenswrapper[4742]: I1205 06:15:04.912653 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a6d05c8-96c6-4ee1-b695-753e5af543bc" containerName="collect-profiles" Dec 05 06:15:04 crc kubenswrapper[4742]: I1205 06:15:04.912858 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a6d05c8-96c6-4ee1-b695-753e5af543bc" containerName="collect-profiles" Dec 05 06:15:04 crc kubenswrapper[4742]: I1205 06:15:04.914360 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z77c8" Dec 05 06:15:04 crc kubenswrapper[4742]: I1205 06:15:04.935487 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z77c8"] Dec 05 06:15:05 crc kubenswrapper[4742]: I1205 06:15:05.002785 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d74mn\" (UniqueName: \"kubernetes.io/projected/439adfba-ae29-4db5-8a77-88eede9d0bd9-kube-api-access-d74mn\") pod \"redhat-operators-z77c8\" (UID: \"439adfba-ae29-4db5-8a77-88eede9d0bd9\") " pod="openshift-marketplace/redhat-operators-z77c8" Dec 05 06:15:05 crc kubenswrapper[4742]: I1205 06:15:05.002845 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/439adfba-ae29-4db5-8a77-88eede9d0bd9-catalog-content\") pod \"redhat-operators-z77c8\" (UID: \"439adfba-ae29-4db5-8a77-88eede9d0bd9\") " pod="openshift-marketplace/redhat-operators-z77c8" Dec 05 06:15:05 crc kubenswrapper[4742]: I1205 06:15:05.003154 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/439adfba-ae29-4db5-8a77-88eede9d0bd9-utilities\") pod \"redhat-operators-z77c8\" (UID: \"439adfba-ae29-4db5-8a77-88eede9d0bd9\") " pod="openshift-marketplace/redhat-operators-z77c8" Dec 05 06:15:05 crc kubenswrapper[4742]: I1205 06:15:05.105427 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d74mn\" (UniqueName: \"kubernetes.io/projected/439adfba-ae29-4db5-8a77-88eede9d0bd9-kube-api-access-d74mn\") pod \"redhat-operators-z77c8\" (UID: \"439adfba-ae29-4db5-8a77-88eede9d0bd9\") " pod="openshift-marketplace/redhat-operators-z77c8" Dec 05 06:15:05 crc kubenswrapper[4742]: I1205 06:15:05.105480 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/439adfba-ae29-4db5-8a77-88eede9d0bd9-catalog-content\") pod \"redhat-operators-z77c8\" (UID: \"439adfba-ae29-4db5-8a77-88eede9d0bd9\") " pod="openshift-marketplace/redhat-operators-z77c8" Dec 05 06:15:05 crc kubenswrapper[4742]: I1205 06:15:05.105559 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/439adfba-ae29-4db5-8a77-88eede9d0bd9-utilities\") pod \"redhat-operators-z77c8\" (UID: \"439adfba-ae29-4db5-8a77-88eede9d0bd9\") " pod="openshift-marketplace/redhat-operators-z77c8" Dec 05 06:15:05 crc kubenswrapper[4742]: I1205 06:15:05.106185 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/439adfba-ae29-4db5-8a77-88eede9d0bd9-utilities\") pod \"redhat-operators-z77c8\" (UID: \"439adfba-ae29-4db5-8a77-88eede9d0bd9\") " pod="openshift-marketplace/redhat-operators-z77c8" Dec 05 06:15:05 crc kubenswrapper[4742]: I1205 06:15:05.107355 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/439adfba-ae29-4db5-8a77-88eede9d0bd9-catalog-content\") pod \"redhat-operators-z77c8\" (UID: \"439adfba-ae29-4db5-8a77-88eede9d0bd9\") " pod="openshift-marketplace/redhat-operators-z77c8" Dec 05 06:15:05 crc kubenswrapper[4742]: I1205 06:15:05.122613 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-d74mn\" (UniqueName: \"kubernetes.io/projected/439adfba-ae29-4db5-8a77-88eede9d0bd9-kube-api-access-d74mn\") pod \"redhat-operators-z77c8\" (UID: \"439adfba-ae29-4db5-8a77-88eede9d0bd9\") " pod="openshift-marketplace/redhat-operators-z77c8" Dec 05 06:15:05 crc kubenswrapper[4742]: I1205 06:15:05.243515 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z77c8" Dec 05 06:15:05 crc kubenswrapper[4742]: I1205 06:15:05.572281 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z77c8"] Dec 05 06:15:06 crc kubenswrapper[4742]: I1205 06:15:06.236519 4742 generic.go:334] "Generic (PLEG): container finished" podID="439adfba-ae29-4db5-8a77-88eede9d0bd9" containerID="638f39f3b69197ade380ce0500b746ede74a9207587b901a2b07034aa6f65aef" exitCode=0 Dec 05 06:15:06 crc kubenswrapper[4742]: I1205 06:15:06.236618 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z77c8" event={"ID":"439adfba-ae29-4db5-8a77-88eede9d0bd9","Type":"ContainerDied","Data":"638f39f3b69197ade380ce0500b746ede74a9207587b901a2b07034aa6f65aef"} Dec 05 06:15:06 crc kubenswrapper[4742]: I1205 06:15:06.236850 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z77c8" event={"ID":"439adfba-ae29-4db5-8a77-88eede9d0bd9","Type":"ContainerStarted","Data":"8b89c9f2b2504d5c4cee100630a761ee3633b3e0923a5d4ea3c4d2cf35dd3a16"} Dec 05 06:15:06 crc kubenswrapper[4742]: I1205 06:15:06.238300 4742 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 06:15:07 crc kubenswrapper[4742]: I1205 06:15:07.250602 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z77c8" event={"ID":"439adfba-ae29-4db5-8a77-88eede9d0bd9","Type":"ContainerStarted","Data":"69825182fb26d31bf0e53f36361245e9235ba3311974d988fd1997b2ec68f6cb"} Dec 05 06:15:09 crc kubenswrapper[4742]: I1205 06:15:09.277379 4742 generic.go:334] "Generic (PLEG): container finished" podID="439adfba-ae29-4db5-8a77-88eede9d0bd9" containerID="69825182fb26d31bf0e53f36361245e9235ba3311974d988fd1997b2ec68f6cb" exitCode=0 Dec 05 06:15:09 crc kubenswrapper[4742]: I1205 06:15:09.277482 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z77c8" event={"ID":"439adfba-ae29-4db5-8a77-88eede9d0bd9","Type":"ContainerDied","Data":"69825182fb26d31bf0e53f36361245e9235ba3311974d988fd1997b2ec68f6cb"} Dec 05 06:15:10 crc kubenswrapper[4742]: I1205 06:15:10.295810 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z77c8" event={"ID":"439adfba-ae29-4db5-8a77-88eede9d0bd9","Type":"ContainerStarted","Data":"c5027492da075e5f7eb6f411b5685735804aba663f81c22f48a7032edb179797"} Dec 05 06:15:10 crc kubenswrapper[4742]: I1205 06:15:10.327103 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z77c8" podStartSLOduration=2.5573277709999997 podStartE2EDuration="6.327083971s" podCreationTimestamp="2025-12-05 06:15:04 +0000 UTC" firstStartedPulling="2025-12-05 06:15:06.237999036 +0000 UTC m=+1382.150134098" lastFinishedPulling="2025-12-05 06:15:10.007755196 +0000 UTC m=+1385.919890298" observedRunningTime="2025-12-05 06:15:10.324910853 +0000 UTC m=+1386.237045915" watchObservedRunningTime="2025-12-05 06:15:10.327083971 +0000 UTC m=+1386.239219033" Dec 05 06:15:15 crc 
kubenswrapper[4742]: I1205 06:15:15.244581 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z77c8" Dec 05 06:15:15 crc kubenswrapper[4742]: I1205 06:15:15.245248 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z77c8" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.034797 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.035043 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="7f1e3dac-5031-4dfe-815c-1c1b447f0d64" containerName="openstackclient" containerID="cri-o://0b24c5bc7a890ac19ac54b2ce282a48fb159a89cb8f1121185c3c1f4ebc77ba4" gracePeriod=2 Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.050937 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.280096 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-whtm9"] Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.280310 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-whtm9" podUID="ebded868-aaf1-4294-bec1-ec504cdf1810" containerName="openstack-network-exporter" containerID="cri-o://a1893c421b54a7a4f57b1e37935b532c024cdde49cad64c136f9c853dae146fa" gracePeriod=30 Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.293760 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-tgnp6"] Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.309654 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-z77c8" podUID="439adfba-ae29-4db5-8a77-88eede9d0bd9" containerName="registry-server" probeResult="failure" output=< Dec 05 06:15:16 crc kubenswrapper[4742]: timeout: failed to connect service ":50051" within 1s Dec 05 06:15:16 crc kubenswrapper[4742]: > Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.369867 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-9n84z"] Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.555571 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.607174 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glancebc83-account-delete-wqtlb"] Dec 05 06:15:16 crc kubenswrapper[4742]: E1205 06:15:16.607766 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f1e3dac-5031-4dfe-815c-1c1b447f0d64" containerName="openstackclient" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.607788 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f1e3dac-5031-4dfe-815c-1c1b447f0d64" containerName="openstackclient" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.608075 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f1e3dac-5031-4dfe-815c-1c1b447f0d64" containerName="openstackclient" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.608951 4742 util.go:30] "No sandbox for pod can be found. 
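
The registry-server startup failure just below ("timeout: failed to connect service \":50051\" within 1s") is the output of the grpc-health-probe-style command exec'd inside the catalog pod. The real probe issues a gRPC health RPC; a plain dial-with-deadline, sketched here, fails the same way while the server is still unpacking its catalog content:

package main

import (
	"fmt"
	"net"
	"time"
)

// canConnect approximates the startup check above with a TCP dial: if nothing
// is listening on the registry port within the deadline, report the same
// "failed to connect" style of failure. (The real probe also checks gRPC health.)
func canConnect(addr string, timeout time.Duration) error {
	conn, err := net.DialTimeout("tcp", addr, timeout)
	if err != nil {
		return fmt.Errorf("timeout: failed to connect service %q within %s", addr, timeout)
	}
	return conn.Close()
}

func main() {
	fmt.Println(canConnect("127.0.0.1:50051", time.Second))
}
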
Need to start a new one" pod="openstack/glancebc83-account-delete-wqtlb" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.625154 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinderedde-account-delete-pwsvp"] Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.627150 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinderedde-account-delete-pwsvp" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.651293 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glancebc83-account-delete-wqtlb"] Dec 05 06:15:16 crc kubenswrapper[4742]: E1205 06:15:16.669728 4742 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 05 06:15:16 crc kubenswrapper[4742]: E1205 06:15:16.669784 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data podName:7b5d8165-e06e-4600-9cab-9cf84c010725 nodeName:}" failed. No retries permitted until 2025-12-05 06:15:17.169768538 +0000 UTC m=+1393.081903600 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data") pod "rabbitmq-cell1-server-0" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725") : configmap "rabbitmq-cell1-config-data" not found Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.672467 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.672509 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.672544 4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.672944 4742 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2a0650cb5fb1ecf5b2a54d2428e362d6056d9793a00fef4de45d1cd9dff294dd"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.672987 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://2a0650cb5fb1ecf5b2a54d2428e362d6056d9793a00fef4de45d1cd9dff294dd" gracePeriod=600 Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.682166 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinderedde-account-delete-pwsvp"] Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.775099 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4227032-1b4c-4059-b91f-cf5ece6b20b2-operator-scripts\") pod \"glancebc83-account-delete-wqtlb\" (UID: \"c4227032-1b4c-4059-b91f-cf5ece6b20b2\") " pod="openstack/glancebc83-account-delete-wqtlb" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.775255 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45v2w\" (UniqueName: \"kubernetes.io/projected/6a5ca1f6-73b0-43da-82c6-995495666585-kube-api-access-45v2w\") pod \"cinderedde-account-delete-pwsvp\" (UID: \"6a5ca1f6-73b0-43da-82c6-995495666585\") " pod="openstack/cinderedde-account-delete-pwsvp" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.775278 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l9tj\" (UniqueName: \"kubernetes.io/projected/c4227032-1b4c-4059-b91f-cf5ece6b20b2-kube-api-access-7l9tj\") pod \"glancebc83-account-delete-wqtlb\" (UID: \"c4227032-1b4c-4059-b91f-cf5ece6b20b2\") " pod="openstack/glancebc83-account-delete-wqtlb" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.775296 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a5ca1f6-73b0-43da-82c6-995495666585-operator-scripts\") pod \"cinderedde-account-delete-pwsvp\" (UID: \"6a5ca1f6-73b0-43da-82c6-995495666585\") " pod="openstack/cinderedde-account-delete-pwsvp" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.809123 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron5be2-account-delete-l9sfl"] Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.810330 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron5be2-account-delete-l9sfl" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.816931 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron5be2-account-delete-l9sfl"] Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.876960 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4227032-1b4c-4059-b91f-cf5ece6b20b2-operator-scripts\") pod \"glancebc83-account-delete-wqtlb\" (UID: \"c4227032-1b4c-4059-b91f-cf5ece6b20b2\") " pod="openstack/glancebc83-account-delete-wqtlb" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.877474 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qp5w\" (UniqueName: \"kubernetes.io/projected/4f2ab762-07a0-426d-a84a-a53ad7e2fef0-kube-api-access-8qp5w\") pod \"neutron5be2-account-delete-l9sfl\" (UID: \"4f2ab762-07a0-426d-a84a-a53ad7e2fef0\") " pod="openstack/neutron5be2-account-delete-l9sfl" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.877572 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f2ab762-07a0-426d-a84a-a53ad7e2fef0-operator-scripts\") pod \"neutron5be2-account-delete-l9sfl\" (UID: \"4f2ab762-07a0-426d-a84a-a53ad7e2fef0\") " pod="openstack/neutron5be2-account-delete-l9sfl" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.877596 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45v2w\" (UniqueName: \"kubernetes.io/projected/6a5ca1f6-73b0-43da-82c6-995495666585-kube-api-access-45v2w\") pod \"cinderedde-account-delete-pwsvp\" (UID: \"6a5ca1f6-73b0-43da-82c6-995495666585\") " pod="openstack/cinderedde-account-delete-pwsvp" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.877622 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7l9tj\" (UniqueName: \"kubernetes.io/projected/c4227032-1b4c-4059-b91f-cf5ece6b20b2-kube-api-access-7l9tj\") pod \"glancebc83-account-delete-wqtlb\" (UID: \"c4227032-1b4c-4059-b91f-cf5ece6b20b2\") " pod="openstack/glancebc83-account-delete-wqtlb" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.877641 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a5ca1f6-73b0-43da-82c6-995495666585-operator-scripts\") pod \"cinderedde-account-delete-pwsvp\" (UID: \"6a5ca1f6-73b0-43da-82c6-995495666585\") " pod="openstack/cinderedde-account-delete-pwsvp" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.878037 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4227032-1b4c-4059-b91f-cf5ece6b20b2-operator-scripts\") pod \"glancebc83-account-delete-wqtlb\" (UID: \"c4227032-1b4c-4059-b91f-cf5ece6b20b2\") " pod="openstack/glancebc83-account-delete-wqtlb" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.878354 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a5ca1f6-73b0-43da-82c6-995495666585-operator-scripts\") pod \"cinderedde-account-delete-pwsvp\" (UID: \"6a5ca1f6-73b0-43da-82c6-995495666585\") " pod="openstack/cinderedde-account-delete-pwsvp" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.964026 
4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbicanf9da-account-delete-4bv5f"] Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.966376 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbicanf9da-account-delete-4bv5f" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.979672 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f2ab762-07a0-426d-a84a-a53ad7e2fef0-operator-scripts\") pod \"neutron5be2-account-delete-l9sfl\" (UID: \"4f2ab762-07a0-426d-a84a-a53ad7e2fef0\") " pod="openstack/neutron5be2-account-delete-l9sfl" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.979785 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qp5w\" (UniqueName: \"kubernetes.io/projected/4f2ab762-07a0-426d-a84a-a53ad7e2fef0-kube-api-access-8qp5w\") pod \"neutron5be2-account-delete-l9sfl\" (UID: \"4f2ab762-07a0-426d-a84a-a53ad7e2fef0\") " pod="openstack/neutron5be2-account-delete-l9sfl" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.990576 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f2ab762-07a0-426d-a84a-a53ad7e2fef0-operator-scripts\") pod \"neutron5be2-account-delete-l9sfl\" (UID: \"4f2ab762-07a0-426d-a84a-a53ad7e2fef0\") " pod="openstack/neutron5be2-account-delete-l9sfl" Dec 05 06:15:16 crc kubenswrapper[4742]: I1205 06:15:16.992183 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-f8hfs"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.000657 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbicanf9da-account-delete-4bv5f"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.006667 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45v2w\" (UniqueName: \"kubernetes.io/projected/6a5ca1f6-73b0-43da-82c6-995495666585-kube-api-access-45v2w\") pod \"cinderedde-account-delete-pwsvp\" (UID: \"6a5ca1f6-73b0-43da-82c6-995495666585\") " pod="openstack/cinderedde-account-delete-pwsvp" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.007185 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7l9tj\" (UniqueName: \"kubernetes.io/projected/c4227032-1b4c-4059-b91f-cf5ece6b20b2-kube-api-access-7l9tj\") pod \"glancebc83-account-delete-wqtlb\" (UID: \"c4227032-1b4c-4059-b91f-cf5ece6b20b2\") " pod="openstack/glancebc83-account-delete-wqtlb" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.074702 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-f8hfs"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.076279 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinderedde-account-delete-pwsvp" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.092580 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.092908 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="e7d76df0-4f21-4729-9729-1f2ff54a8332" containerName="ovn-northd" containerID="cri-o://caa9cfd6937fda940888bb64cbccaa6adf27580ea4e177e3d3adf4b5e4e8b93d" gracePeriod=30 Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.093345 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="e7d76df0-4f21-4729-9729-1f2ff54a8332" containerName="openstack-network-exporter" containerID="cri-o://d1934753cd07a71a87a5b0d5f8a6aecf1b11e12621dc578d17d0fc95dbb8f143" gracePeriod=30 Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.093549 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qp5w\" (UniqueName: \"kubernetes.io/projected/4f2ab762-07a0-426d-a84a-a53ad7e2fef0-kube-api-access-8qp5w\") pod \"neutron5be2-account-delete-l9sfl\" (UID: \"4f2ab762-07a0-426d-a84a-a53ad7e2fef0\") " pod="openstack/neutron5be2-account-delete-l9sfl" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.095217 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptthf\" (UniqueName: \"kubernetes.io/projected/61c4b9e1-5266-49eb-8348-3b1034562185-kube-api-access-ptthf\") pod \"barbicanf9da-account-delete-4bv5f\" (UID: \"61c4b9e1-5266-49eb-8348-3b1034562185\") " pod="openstack/barbicanf9da-account-delete-4bv5f" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.095259 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61c4b9e1-5266-49eb-8348-3b1034562185-operator-scripts\") pod \"barbicanf9da-account-delete-4bv5f\" (UID: \"61c4b9e1-5266-49eb-8348-3b1034562185\") " pod="openstack/barbicanf9da-account-delete-4bv5f" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.106594 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.134492 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement9800-account-delete-mlmb7"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.144189 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement9800-account-delete-mlmb7" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.145931 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement9800-account-delete-mlmb7"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.197562 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptthf\" (UniqueName: \"kubernetes.io/projected/61c4b9e1-5266-49eb-8348-3b1034562185-kube-api-access-ptthf\") pod \"barbicanf9da-account-delete-4bv5f\" (UID: \"61c4b9e1-5266-49eb-8348-3b1034562185\") " pod="openstack/barbicanf9da-account-delete-4bv5f" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.197663 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61c4b9e1-5266-49eb-8348-3b1034562185-operator-scripts\") pod \"barbicanf9da-account-delete-4bv5f\" (UID: \"61c4b9e1-5266-49eb-8348-3b1034562185\") " pod="openstack/barbicanf9da-account-delete-4bv5f" Dec 05 06:15:17 crc kubenswrapper[4742]: E1205 06:15:17.200961 4742 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 05 06:15:17 crc kubenswrapper[4742]: E1205 06:15:17.201013 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data podName:d6b096f4-483e-48c5-a3e1-a178c0c5ae6e nodeName:}" failed. No retries permitted until 2025-12-05 06:15:17.700997432 +0000 UTC m=+1393.613132494 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data") pod "rabbitmq-server-0" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e") : configmap "rabbitmq-config-data" not found Dec 05 06:15:17 crc kubenswrapper[4742]: E1205 06:15:17.201180 4742 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 05 06:15:17 crc kubenswrapper[4742]: E1205 06:15:17.201269 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data podName:7b5d8165-e06e-4600-9cab-9cf84c010725 nodeName:}" failed. No retries permitted until 2025-12-05 06:15:18.201248919 +0000 UTC m=+1394.113383981 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data") pod "rabbitmq-cell1-server-0" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725") : configmap "rabbitmq-cell1-config-data" not found Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.211264 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61c4b9e1-5266-49eb-8348-3b1034562185-operator-scripts\") pod \"barbicanf9da-account-delete-4bv5f\" (UID: \"61c4b9e1-5266-49eb-8348-3b1034562185\") " pod="openstack/barbicanf9da-account-delete-4bv5f" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.234519 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron5be2-account-delete-l9sfl" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.252133 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-4svn8"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.267414 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptthf\" (UniqueName: \"kubernetes.io/projected/61c4b9e1-5266-49eb-8348-3b1034562185-kube-api-access-ptthf\") pod \"barbicanf9da-account-delete-4bv5f\" (UID: \"61c4b9e1-5266-49eb-8348-3b1034562185\") " pod="openstack/barbicanf9da-account-delete-4bv5f" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.289750 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glancebc83-account-delete-wqtlb" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.300373 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9p64\" (UniqueName: \"kubernetes.io/projected/8b956518-9768-477f-9acb-1fc3459427f7-kube-api-access-n9p64\") pod \"placement9800-account-delete-mlmb7\" (UID: \"8b956518-9768-477f-9acb-1fc3459427f7\") " pod="openstack/placement9800-account-delete-mlmb7" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.300414 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b956518-9768-477f-9acb-1fc3459427f7-operator-scripts\") pod \"placement9800-account-delete-mlmb7\" (UID: \"8b956518-9768-477f-9acb-1fc3459427f7\") " pod="openstack/placement9800-account-delete-mlmb7" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.318424 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-4svn8"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.342133 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.342776 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="b8e993d8-0221-4214-b00a-ca745e716bbe" containerName="openstack-network-exporter" containerID="cri-o://3fa7e542679f95885cdd8ab99d9224e870ebcce54df90ffe1a480a639c7703e9" gracePeriod=300 Dec 05 06:15:17 crc kubenswrapper[4742]: E1205 06:15:17.368262 4742 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-9n84z" message=< Dec 05 06:15:17 crc kubenswrapper[4742]: Exiting ovn-controller (1) [ OK ] Dec 05 06:15:17 crc kubenswrapper[4742]: > Dec 05 06:15:17 crc kubenswrapper[4742]: E1205 06:15:17.368301 4742 kuberuntime_container.go:691] "PreStop hook failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " pod="openstack/ovn-controller-9n84z" podUID="b5df8784-b63d-41b7-a542-dcf53ea6cc5e" containerName="ovn-controller" containerID="cri-o://54edd9b1ddd6e1ba491286ac963077f358b82344ad584ecba6ecb84b4f7da42c" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.368335 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-9n84z" podUID="b5df8784-b63d-41b7-a542-dcf53ea6cc5e" containerName="ovn-controller" 
containerID="cri-o://54edd9b1ddd6e1ba491286ac963077f358b82344ad584ecba6ecb84b4f7da42c" gracePeriod=30 Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.382225 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapie0d5-account-delete-294ls"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.383817 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapie0d5-account-delete-294ls" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.392855 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapie0d5-account-delete-294ls"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.406783 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9p64\" (UniqueName: \"kubernetes.io/projected/8b956518-9768-477f-9acb-1fc3459427f7-kube-api-access-n9p64\") pod \"placement9800-account-delete-mlmb7\" (UID: \"8b956518-9768-477f-9acb-1fc3459427f7\") " pod="openstack/placement9800-account-delete-mlmb7" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.406839 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b956518-9768-477f-9acb-1fc3459427f7-operator-scripts\") pod \"placement9800-account-delete-mlmb7\" (UID: \"8b956518-9768-477f-9acb-1fc3459427f7\") " pod="openstack/placement9800-account-delete-mlmb7" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.407937 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b956518-9768-477f-9acb-1fc3459427f7-operator-scripts\") pod \"placement9800-account-delete-mlmb7\" (UID: \"8b956518-9768-477f-9acb-1fc3459427f7\") " pod="openstack/placement9800-account-delete-mlmb7" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.410805 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-vcv4d"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.444067 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-vcv4d"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.462099 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell12c4b-account-delete-hgg9m"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.463448 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell12c4b-account-delete-hgg9m" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.463653 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbicanf9da-account-delete-4bv5f" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.465381 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-whtm9_ebded868-aaf1-4294-bec1-ec504cdf1810/openstack-network-exporter/0.log" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.465427 4742 generic.go:334] "Generic (PLEG): container finished" podID="ebded868-aaf1-4294-bec1-ec504cdf1810" containerID="a1893c421b54a7a4f57b1e37935b532c024cdde49cad64c136f9c853dae146fa" exitCode=2 Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.465478 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-whtm9" event={"ID":"ebded868-aaf1-4294-bec1-ec504cdf1810","Type":"ContainerDied","Data":"a1893c421b54a7a4f57b1e37935b532c024cdde49cad64c136f9c853dae146fa"} Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.469827 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell12c4b-account-delete-hgg9m"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.508928 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9p64\" (UniqueName: \"kubernetes.io/projected/8b956518-9768-477f-9acb-1fc3459427f7-kube-api-access-n9p64\") pod \"placement9800-account-delete-mlmb7\" (UID: \"8b956518-9768-477f-9acb-1fc3459427f7\") " pod="openstack/placement9800-account-delete-mlmb7" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.509184 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novacell036a3-account-delete-mmf6x"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.511769 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement9800-account-delete-mlmb7" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.519030 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a690523-b1e4-4dd5-b280-58fd8b91b3bf-operator-scripts\") pod \"novaapie0d5-account-delete-294ls\" (UID: \"1a690523-b1e4-4dd5-b280-58fd8b91b3bf\") " pod="openstack/novaapie0d5-account-delete-294ls" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.519311 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdrp4\" (UniqueName: \"kubernetes.io/projected/1a690523-b1e4-4dd5-b280-58fd8b91b3bf-kube-api-access-rdrp4\") pod \"novaapie0d5-account-delete-294ls\" (UID: \"1a690523-b1e4-4dd5-b280-58fd8b91b3bf\") " pod="openstack/novaapie0d5-account-delete-294ls" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.535781 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell036a3-account-delete-mmf6x" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.560653 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="2a0650cb5fb1ecf5b2a54d2428e362d6056d9793a00fef4de45d1cd9dff294dd" exitCode=0 Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.560780 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"2a0650cb5fb1ecf5b2a54d2428e362d6056d9793a00fef4de45d1cd9dff294dd"} Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.560817 4742 scope.go:117] "RemoveContainer" containerID="310cca7f57f78facafa7379a55640dd8bda7651e6fa10b0fa067a67c3dc118ef" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.619389 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.620762 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="a88c6674-8c2f-4868-8839-1ec313fbfe8e" containerName="openstack-network-exporter" containerID="cri-o://1c678ad593b1fd73c0db215db0715772580dca8117af0aa42f0c6d499e00b732" gracePeriod=300 Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.624935 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58xt4\" (UniqueName: \"kubernetes.io/projected/42c1f939-2d9c-4a8d-a341-cbce22551d58-kube-api-access-58xt4\") pod \"novacell12c4b-account-delete-hgg9m\" (UID: \"42c1f939-2d9c-4a8d-a341-cbce22551d58\") " pod="openstack/novacell12c4b-account-delete-hgg9m" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.627120 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a690523-b1e4-4dd5-b280-58fd8b91b3bf-operator-scripts\") pod \"novaapie0d5-account-delete-294ls\" (UID: \"1a690523-b1e4-4dd5-b280-58fd8b91b3bf\") " pod="openstack/novaapie0d5-account-delete-294ls" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.627449 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdrp4\" (UniqueName: \"kubernetes.io/projected/1a690523-b1e4-4dd5-b280-58fd8b91b3bf-kube-api-access-rdrp4\") pod \"novaapie0d5-account-delete-294ls\" (UID: \"1a690523-b1e4-4dd5-b280-58fd8b91b3bf\") " pod="openstack/novaapie0d5-account-delete-294ls" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.627640 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a690523-b1e4-4dd5-b280-58fd8b91b3bf-operator-scripts\") pod \"novaapie0d5-account-delete-294ls\" (UID: \"1a690523-b1e4-4dd5-b280-58fd8b91b3bf\") " pod="openstack/novaapie0d5-account-delete-294ls" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.627650 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b2208e7-3101-4090-9f35-fba640d2f1d9-operator-scripts\") pod \"novacell036a3-account-delete-mmf6x\" (UID: \"5b2208e7-3101-4090-9f35-fba640d2f1d9\") " pod="openstack/novacell036a3-account-delete-mmf6x" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.690889 4742 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hgkj\" (UniqueName: \"kubernetes.io/projected/5b2208e7-3101-4090-9f35-fba640d2f1d9-kube-api-access-8hgkj\") pod \"novacell036a3-account-delete-mmf6x\" (UID: \"5b2208e7-3101-4090-9f35-fba640d2f1d9\") " pod="openstack/novacell036a3-account-delete-mmf6x" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.690973 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42c1f939-2d9c-4a8d-a341-cbce22551d58-operator-scripts\") pod \"novacell12c4b-account-delete-hgg9m\" (UID: \"42c1f939-2d9c-4a8d-a341-cbce22551d58\") " pod="openstack/novacell12c4b-account-delete-hgg9m" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.722654 4742 generic.go:334] "Generic (PLEG): container finished" podID="e7d76df0-4f21-4729-9729-1f2ff54a8332" containerID="d1934753cd07a71a87a5b0d5f8a6aecf1b11e12621dc578d17d0fc95dbb8f143" exitCode=2 Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.722701 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell036a3-account-delete-mmf6x"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.722740 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e7d76df0-4f21-4729-9729-1f2ff54a8332","Type":"ContainerDied","Data":"d1934753cd07a71a87a5b0d5f8a6aecf1b11e12621dc578d17d0fc95dbb8f143"} Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.746577 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-kb752"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.746830 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752" podUID="8485e5ca-5372-441f-9e02-3df086991b2c" containerName="dnsmasq-dns" containerID="cri-o://3aa39b731afc81b18e6dc9bcdfc9e62825bea59d828add91802d92234f28b7ac" gracePeriod=10 Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.758526 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdrp4\" (UniqueName: \"kubernetes.io/projected/1a690523-b1e4-4dd5-b280-58fd8b91b3bf-kube-api-access-rdrp4\") pod \"novaapie0d5-account-delete-294ls\" (UID: \"1a690523-b1e4-4dd5-b280-58fd8b91b3bf\") " pod="openstack/novaapie0d5-account-delete-294ls" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.768228 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="b8e993d8-0221-4214-b00a-ca745e716bbe" containerName="ovsdbserver-sb" containerID="cri-o://75b9102a45bd7348ee8155d1c3b59cc40301c88e77fb8f3ce1855a075666bb90" gracePeriod=300 Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.781288 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-c87xk"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.793238 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hgkj\" (UniqueName: \"kubernetes.io/projected/5b2208e7-3101-4090-9f35-fba640d2f1d9-kube-api-access-8hgkj\") pod \"novacell036a3-account-delete-mmf6x\" (UID: \"5b2208e7-3101-4090-9f35-fba640d2f1d9\") " pod="openstack/novacell036a3-account-delete-mmf6x" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.793292 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/42c1f939-2d9c-4a8d-a341-cbce22551d58-operator-scripts\") pod \"novacell12c4b-account-delete-hgg9m\" (UID: \"42c1f939-2d9c-4a8d-a341-cbce22551d58\") " pod="openstack/novacell12c4b-account-delete-hgg9m" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.793406 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58xt4\" (UniqueName: \"kubernetes.io/projected/42c1f939-2d9c-4a8d-a341-cbce22551d58-kube-api-access-58xt4\") pod \"novacell12c4b-account-delete-hgg9m\" (UID: \"42c1f939-2d9c-4a8d-a341-cbce22551d58\") " pod="openstack/novacell12c4b-account-delete-hgg9m" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.793465 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b2208e7-3101-4090-9f35-fba640d2f1d9-operator-scripts\") pod \"novacell036a3-account-delete-mmf6x\" (UID: \"5b2208e7-3101-4090-9f35-fba640d2f1d9\") " pod="openstack/novacell036a3-account-delete-mmf6x" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.794330 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b2208e7-3101-4090-9f35-fba640d2f1d9-operator-scripts\") pod \"novacell036a3-account-delete-mmf6x\" (UID: \"5b2208e7-3101-4090-9f35-fba640d2f1d9\") " pod="openstack/novacell036a3-account-delete-mmf6x" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.795719 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42c1f939-2d9c-4a8d-a341-cbce22551d58-operator-scripts\") pod \"novacell12c4b-account-delete-hgg9m\" (UID: \"42c1f939-2d9c-4a8d-a341-cbce22551d58\") " pod="openstack/novacell12c4b-account-delete-hgg9m" Dec 05 06:15:17 crc kubenswrapper[4742]: E1205 06:15:17.795890 4742 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 05 06:15:17 crc kubenswrapper[4742]: E1205 06:15:17.796028 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data podName:d6b096f4-483e-48c5-a3e1-a178c0c5ae6e nodeName:}" failed. No retries permitted until 2025-12-05 06:15:18.79600616 +0000 UTC m=+1394.708141222 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data") pod "rabbitmq-server-0" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e") : configmap "rabbitmq-config-data" not found Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.831391 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-c87xk"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.892384 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-22vpx"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.892556 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hgkj\" (UniqueName: \"kubernetes.io/projected/5b2208e7-3101-4090-9f35-fba640d2f1d9-kube-api-access-8hgkj\") pod \"novacell036a3-account-delete-mmf6x\" (UID: \"5b2208e7-3101-4090-9f35-fba640d2f1d9\") " pod="openstack/novacell036a3-account-delete-mmf6x" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.908610 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58xt4\" (UniqueName: \"kubernetes.io/projected/42c1f939-2d9c-4a8d-a341-cbce22551d58-kube-api-access-58xt4\") pod \"novacell12c4b-account-delete-hgg9m\" (UID: \"42c1f939-2d9c-4a8d-a341-cbce22551d58\") " pod="openstack/novacell12c4b-account-delete-hgg9m" Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.913745 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-22vpx"] Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.913756 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="a88c6674-8c2f-4868-8839-1ec313fbfe8e" containerName="ovsdbserver-nb" containerID="cri-o://9d256f9bac05a4b6bb691eed52a6f2da591190f33538fc2e7323132010170272" gracePeriod=300 Dec 05 06:15:17 crc kubenswrapper[4742]: I1205 06:15:17.979750 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-j7dqz"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.009244 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-j7dqz"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.049106 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-4qhkv"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.099978 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752" podUID="8485e5ca-5372-441f-9e02-3df086991b2c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.195:5353: connect: connection refused" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.111357 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-4qhkv"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.133123 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.133338 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e3428207-2cb4-47d8-b4d8-941c3a4928fb" containerName="glance-log" containerID="cri-o://c578f580ac4c94f28399a0f7e39da62ca4fb8496169c7001d2053863082caf1f" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.133722 4742 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/glance-default-internal-api-0" podUID="e3428207-2cb4-47d8-b4d8-941c3a4928fb" containerName="glance-httpd" containerID="cri-o://b8f02737722d7ebc14c897ea39f901ead1646c8d5e8658a44265bfe41044eed8" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.154115 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-wdsf6"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.170169 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-wdsf6"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.197965 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novaapie0d5-account-delete-294ls" Dec 05 06:15:18 crc kubenswrapper[4742]: E1205 06:15:18.213161 4742 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 05 06:15:18 crc kubenswrapper[4742]: E1205 06:15:18.213227 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data podName:7b5d8165-e06e-4600-9cab-9cf84c010725 nodeName:}" failed. No retries permitted until 2025-12-05 06:15:20.213214316 +0000 UTC m=+1396.125349378 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data") pod "rabbitmq-cell1-server-0" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725") : configmap "rabbitmq-cell1-config-data" not found Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.213551 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.213785 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="3b535626-d96c-4843-bc25-c4fafa967b23" containerName="cinder-scheduler" containerID="cri-o://6ee0e1f6ed8fc4033483315f49001dac70cdc9d56d231f0ed6e4bf14ed5391bf" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.214106 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="3b535626-d96c-4843-bc25-c4fafa967b23" containerName="probe" containerID="cri-o://a6798471637e201a3f0d2d87ce22e1f621bb66ed7382d07497c5dc7f71a7d869" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.244466 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell12c4b-account-delete-hgg9m" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.252020 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.252287 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d7a764d5-447f-483d-b819-0e398e749600" containerName="glance-log" containerID="cri-o://f455df6d411179859e60d3c9b127100c03c9bd439f8c01b9bb223b4b2bbfd0d5" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.252723 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="d7a764d5-447f-483d-b819-0e398e749600" containerName="glance-httpd" containerID="cri-o://a1b30e5b41ae0a67e19767b1176483a9b711ab959c0c1007661ee4670c30e081" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.281074 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.281525 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-server" containerID="cri-o://662fdbccb819aa757a5eacbc682c4c9d90ae7096d453acac04fbb8d2c2d724e9" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.281625 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="swift-recon-cron" containerID="cri-o://23080c098e9241c0fbef2d16834563f5be32e05ed6a93235162276519247c330" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.281660 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="rsync" containerID="cri-o://402a519ada8012b4c837384aa46b5de9d5a53090de81a4bfd5fca5e66afd80ab" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.281688 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-expirer" containerID="cri-o://83482a05302b7d016da2098260530a40bbabcc0dd30bbee8e001d56649d1fa10" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.281722 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-updater" containerID="cri-o://583bc9aead517370de5511bc87f2ce12fd00f5d749568164aaf7dd9550bed55d" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.281756 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-auditor" containerID="cri-o://c8e35cc4fdc899da4c083432abcbef5e1ad92a9b95d3984cab5952bb33e1b375" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.281783 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-replicator" containerID="cri-o://76a0ea55014e165c32a9608cf728f086e8082a97b97de1dd0fdb8cb27caa0a7e" gracePeriod=30 Dec 
05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.281811 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-server" containerID="cri-o://89f787d3dcbe6d0e4ec54aa9195f1fe45b50844797a04c3b326966f17201a671" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.281838 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-updater" containerID="cri-o://10dd23759afdac3e2bd7b9f5ad1e8df111f57ac5da85f46f8da24ff04f9269b3" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.281867 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-auditor" containerID="cri-o://a0061dd4be94377433ec372b482fc15e1700f814f12276d54dcb9692d64d5aab" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.281894 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-replicator" containerID="cri-o://0eaa57a7d1edb43a21fc4813afcd8bd8362171ef4a64691cceb98492dc6baccb" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.281938 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-server" containerID="cri-o://9b48fcbbeeb0dfa6813285a8982e885fd781741008cdbdef2351e5277caa44d7" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.281971 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-reaper" containerID="cri-o://f326ad43ebacf85ccc332a8b35e3c1fcca45c28c2a4b229df2457a4983e5ac64" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.282002 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-auditor" containerID="cri-o://a572a48ba1392fc5a8267bbf98db108fc184bd4381327a56c58c1ab6e32e931f" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.282033 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-replicator" containerID="cri-o://c5fb081f824bef8d38c8af954df8461ee3d95ecdcf24abe5f14ab12b9b79eaaf" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.308550 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.309147 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="e6063f78-1b45-493e-ae25-62239a1ed5e3" containerName="cinder-api-log" containerID="cri-o://873f59d3fccd6e3aebb3bd5b7bfff039e46a5ac7aa542e462b2ded2d505ccf92" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.309278 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="e6063f78-1b45-493e-ae25-62239a1ed5e3" containerName="cinder-api" 
containerID="cri-o://9ddfbe5ffee29c713d306ff006d773b5b100e240b7d408ad28e4d4bab8088896" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.317117 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell036a3-account-delete-mmf6x" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.345120 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-574f89688c-hbh7m"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.345436 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-574f89688c-hbh7m" podUID="65eece87-4279-4d6c-b2a6-5841fd5b3298" containerName="neutron-api" containerID="cri-o://0f5251869335092c27ec478b7850c0b761a69b72c601c2774db88367eee9ef81" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.345598 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-574f89688c-hbh7m" podUID="65eece87-4279-4d6c-b2a6-5841fd5b3298" containerName="neutron-httpd" containerID="cri-o://c3a0400780874ea1469dfe2dbe4742008a4997fce008a566e72b6e1f3a0d759f" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: E1205 06:15:18.345859 4742 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Dec 05 06:15:18 crc kubenswrapper[4742]: E1205 06:15:18.345912 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/42c1f939-2d9c-4a8d-a341-cbce22551d58-operator-scripts podName:42c1f939-2d9c-4a8d-a341-cbce22551d58 nodeName:}" failed. No retries permitted until 2025-12-05 06:15:18.845898607 +0000 UTC m=+1394.758033659 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/42c1f939-2d9c-4a8d-a341-cbce22551d58-operator-scripts") pod "novacell12c4b-account-delete-hgg9m" (UID: "42c1f939-2d9c-4a8d-a341-cbce22551d58") : configmap "openstack-cell1-scripts" not found Dec 05 06:15:18 crc kubenswrapper[4742]: E1205 06:15:18.362808 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="caa9cfd6937fda940888bb64cbccaa6adf27580ea4e177e3d3adf4b5e4e8b93d" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.382835 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-whtm9_ebded868-aaf1-4294-bec1-ec504cdf1810/openstack-network-exporter/0.log" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.382911 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.425042 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e8bface-3ae8-4a16-85c0-eca434ca57f1" path="/var/lib/kubelet/pods/1e8bface-3ae8-4a16-85c0-eca434ca57f1/volumes" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.425611 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29e2d29f-d9a8-4c42-b79e-4b287ec09187" path="/var/lib/kubelet/pods/29e2d29f-d9a8-4c42-b79e-4b287ec09187/volumes" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.450606 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e09e595-f2cd-4121-af36-123d55c2f729" path="/var/lib/kubelet/pods/2e09e595-f2cd-4121-af36-123d55c2f729/volumes" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.451361 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4df39ed0-9850-4409-8648-724b15671640" path="/var/lib/kubelet/pods/4df39ed0-9850-4409-8648-724b15671640/volumes" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.452215 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52759157-a5b0-481a-9128-ee595e269af9" path="/var/lib/kubelet/pods/52759157-a5b0-481a-9128-ee595e269af9/volumes" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.453314 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d56b0e1-cf28-4913-af18-4c13aafc539a" path="/var/lib/kubelet/pods/9d56b0e1-cf28-4913-af18-4c13aafc539a/volumes" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.453792 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0730438-d5e9-48c6-b5d1-280b1fb0f4b1" path="/var/lib/kubelet/pods/b0730438-d5e9-48c6-b5d1-280b1fb0f4b1/volumes" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.455582 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7413df9-1f00-41d2-8ef1-e85a83fd6eac" path="/var/lib/kubelet/pods/c7413df9-1f00-41d2-8ef1-e85a83fd6eac/volumes" Dec 05 06:15:18 crc kubenswrapper[4742]: E1205 06:15:18.508785 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="caa9cfd6937fda940888bb64cbccaa6adf27580ea4e177e3d3adf4b5e4e8b93d" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 05 06:15:18 crc kubenswrapper[4742]: E1205 06:15:18.545024 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="caa9cfd6937fda940888bb64cbccaa6adf27580ea4e177e3d3adf4b5e4e8b93d" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 05 06:15:18 crc kubenswrapper[4742]: E1205 06:15:18.545104 4742 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="e7d76df0-4f21-4729-9729-1f2ff54a8332" containerName="ovn-northd" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.554379 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebded868-aaf1-4294-bec1-ec504cdf1810-metrics-certs-tls-certs\") pod \"ebded868-aaf1-4294-bec1-ec504cdf1810\" (UID: 
\"ebded868-aaf1-4294-bec1-ec504cdf1810\") " Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.554456 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h64qz\" (UniqueName: \"kubernetes.io/projected/ebded868-aaf1-4294-bec1-ec504cdf1810-kube-api-access-h64qz\") pod \"ebded868-aaf1-4294-bec1-ec504cdf1810\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.554559 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/ebded868-aaf1-4294-bec1-ec504cdf1810-ovn-rundir\") pod \"ebded868-aaf1-4294-bec1-ec504cdf1810\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.554652 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebded868-aaf1-4294-bec1-ec504cdf1810-combined-ca-bundle\") pod \"ebded868-aaf1-4294-bec1-ec504cdf1810\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.554727 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebded868-aaf1-4294-bec1-ec504cdf1810-config\") pod \"ebded868-aaf1-4294-bec1-ec504cdf1810\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.554740 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/ebded868-aaf1-4294-bec1-ec504cdf1810-ovs-rundir\") pod \"ebded868-aaf1-4294-bec1-ec504cdf1810\" (UID: \"ebded868-aaf1-4294-bec1-ec504cdf1810\") " Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.554739 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebded868-aaf1-4294-bec1-ec504cdf1810-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "ebded868-aaf1-4294-bec1-ec504cdf1810" (UID: "ebded868-aaf1-4294-bec1-ec504cdf1810"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.555197 4742 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/ebded868-aaf1-4294-bec1-ec504cdf1810-ovn-rundir\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.555231 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebded868-aaf1-4294-bec1-ec504cdf1810-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "ebded868-aaf1-4294-bec1-ec504cdf1810" (UID: "ebded868-aaf1-4294-bec1-ec504cdf1810"). InnerVolumeSpecName "ovs-rundir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.575605 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-765b847d64-jgxg4"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.575646 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.575657 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-5f7476cfc7-5r2mm"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.575673 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinderedde-account-delete-pwsvp"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.575685 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.575695 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.576104 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-765b847d64-jgxg4" podUID="e42757b3-029e-4fe9-917f-73331394524e" containerName="placement-log" containerID="cri-o://23c49e25d43e79c7f8ade74991cb2aa015e0aae68fc08c3f8bd44099cbee5e4d" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.576349 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" podUID="8ef1f42c-4004-49d9-9456-4d4df074004f" containerName="proxy-httpd" containerID="cri-o://5b04102935046122a7f13426fa065fb74c80fe74f085c0c16a9aab6c2234ef7c" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.576464 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" containerName="nova-metadata-log" containerID="cri-o://5786613a0ea271c49e36cb812feba2d04a12fd32ef5d2c4e0ebfce2f557616b6" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.576794 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" podUID="8ef1f42c-4004-49d9-9456-4d4df074004f" containerName="proxy-server" containerID="cri-o://26e1bb24efd752b2c3019b0bfc4555cbdbfc083437d9055e783ed7089c77d920" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.576871 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" containerName="nova-metadata-metadata" containerID="cri-o://4d42993853ddd3815008a6e598dffed4d9fa4416bef732ddaba8c8e33025a533" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.576925 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-765b847d64-jgxg4" podUID="e42757b3-029e-4fe9-917f-73331394524e" containerName="placement-api" containerID="cri-o://87ced7f756fbe6fb669f5837d287cbfc896ceb71b65ba49a0991a9a56aa7f8a6" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.579645 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebded868-aaf1-4294-bec1-ec504cdf1810-config" (OuterVolumeSpecName: "config") pod "ebded868-aaf1-4294-bec1-ec504cdf1810" (UID: "ebded868-aaf1-4294-bec1-ec504cdf1810"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.592024 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebded868-aaf1-4294-bec1-ec504cdf1810-kube-api-access-h64qz" (OuterVolumeSpecName: "kube-api-access-h64qz") pod "ebded868-aaf1-4294-bec1-ec504cdf1810" (UID: "ebded868-aaf1-4294-bec1-ec504cdf1810"). InnerVolumeSpecName "kube-api-access-h64qz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.617851 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebded868-aaf1-4294-bec1-ec504cdf1810-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ebded868-aaf1-4294-bec1-ec504cdf1810" (UID: "ebded868-aaf1-4294-bec1-ec504cdf1810"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.625831 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.626121 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="aa702931-d853-4f8b-b0d8-58f5476bb7c2" containerName="nova-api-log" containerID="cri-o://69ab6c82edf2ce10d8987c3b5a9194e318538298be58a196bccfa67b05fbaae7" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.626296 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="aa702931-d853-4f8b-b0d8-58f5476bb7c2" containerName="nova-api-api" containerID="cri-o://1f4b1e5b484c4b109f9165cf542665fc7a93e90318c7b4dd1ddb7da94d8a3032" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.656899 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h64qz\" (UniqueName: \"kubernetes.io/projected/ebded868-aaf1-4294-bec1-ec504cdf1810-kube-api-access-h64qz\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.656927 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebded868-aaf1-4294-bec1-ec504cdf1810-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.656936 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ebded868-aaf1-4294-bec1-ec504cdf1810-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.656945 4742 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/ebded868-aaf1-4294-bec1-ec504cdf1810-ovs-rundir\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:18 crc kubenswrapper[4742]: E1205 06:15:18.691850 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9d256f9bac05a4b6bb691eed52a6f2da591190f33538fc2e7323132010170272 is running failed: container process not found" containerID="9d256f9bac05a4b6bb691eed52a6f2da591190f33538fc2e7323132010170272" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 05 06:15:18 crc kubenswrapper[4742]: E1205 06:15:18.695199 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
9d256f9bac05a4b6bb691eed52a6f2da591190f33538fc2e7323132010170272 is running failed: container process not found" containerID="9d256f9bac05a4b6bb691eed52a6f2da591190f33538fc2e7323132010170272" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.696836 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="7b5d8165-e06e-4600-9cab-9cf84c010725" containerName="rabbitmq" containerID="cri-o://4c9ed2559817c2da1b28311959a187477072585e1e74ef4ffe26d1ce23f9ee55" gracePeriod=604800 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.697022 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-7bd94f978b-h9cm5"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.697277 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" podUID="2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" containerName="barbican-keystone-listener-log" containerID="cri-o://3710b941ebb6d9eff059006febf49493e12500d9cc0dc124f356193d93849a0b" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.697681 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" podUID="2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" containerName="barbican-keystone-listener" containerID="cri-o://2f4156cfc6ee4ae12fa0ce4c17f20f7d82287d71fe307700ba39947bbecf3c02" gracePeriod=30 Dec 05 06:15:18 crc kubenswrapper[4742]: E1205 06:15:18.701735 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9d256f9bac05a4b6bb691eed52a6f2da591190f33538fc2e7323132010170272 is running failed: container process not found" containerID="9d256f9bac05a4b6bb691eed52a6f2da591190f33538fc2e7323132010170272" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 05 06:15:18 crc kubenswrapper[4742]: E1205 06:15:18.701894 4742 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9d256f9bac05a4b6bb691eed52a6f2da591190f33538fc2e7323132010170272 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-nb-0" podUID="a88c6674-8c2f-4868-8839-1ec313fbfe8e" containerName="ovsdbserver-nb" Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.708772 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-ks5x5"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.721654 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-2c4b-account-create-update-rcrw9"] Dec 05 06:15:18 crc kubenswrapper[4742]: I1205 06:15:18.772266 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-ks5x5"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.831112 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebded868-aaf1-4294-bec1-ec504cdf1810-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "ebded868-aaf1-4294-bec1-ec504cdf1810" (UID: "ebded868-aaf1-4294-bec1-ec504cdf1810"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.864811 4742 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebded868-aaf1-4294-bec1-ec504cdf1810-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:19 crc kubenswrapper[4742]: E1205 06:15:18.864888 4742 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Dec 05 06:15:19 crc kubenswrapper[4742]: E1205 06:15:18.864931 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data podName:d6b096f4-483e-48c5-a3e1-a178c0c5ae6e nodeName:}" failed. No retries permitted until 2025-12-05 06:15:20.864918189 +0000 UTC m=+1396.777053251 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data") pod "rabbitmq-server-0" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e") : configmap "rabbitmq-config-data" not found Dec 05 06:15:19 crc kubenswrapper[4742]: E1205 06:15:18.865311 4742 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Dec 05 06:15:19 crc kubenswrapper[4742]: E1205 06:15:18.865341 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/42c1f939-2d9c-4a8d-a341-cbce22551d58-operator-scripts podName:42c1f939-2d9c-4a8d-a341-cbce22551d58 nodeName:}" failed. No retries permitted until 2025-12-05 06:15:19.86533323 +0000 UTC m=+1395.777468292 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/42c1f939-2d9c-4a8d-a341-cbce22551d58-operator-scripts") pod "novacell12c4b-account-delete-hgg9m" (UID: "42c1f939-2d9c-4a8d-a341-cbce22551d58") : configmap "openstack-cell1-scripts" not found Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.904260 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-whtm9_ebded868-aaf1-4294-bec1-ec504cdf1810/openstack-network-exporter/0.log" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.904381 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-whtm9" event={"ID":"ebded868-aaf1-4294-bec1-ec504cdf1810","Type":"ContainerDied","Data":"c63607bf86ec29f4baa0f1d8167800eb0410440e83bfca824ac57e48ea4ae6b2"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.904421 4742 scope.go:117] "RemoveContainer" containerID="a1893c421b54a7a4f57b1e37935b532c024cdde49cad64c136f9c853dae146fa" Dec 05 06:15:19 crc kubenswrapper[4742]: E1205 06:15:18.904538 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 75b9102a45bd7348ee8155d1c3b59cc40301c88e77fb8f3ce1855a075666bb90 is running failed: container process not found" containerID="75b9102a45bd7348ee8155d1c3b59cc40301c88e77fb8f3ce1855a075666bb90" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.904670 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-whtm9" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.906471 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-2c4b-account-create-update-rcrw9"] Dec 05 06:15:19 crc kubenswrapper[4742]: E1205 06:15:18.921259 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 75b9102a45bd7348ee8155d1c3b59cc40301c88e77fb8f3ce1855a075666bb90 is running failed: container process not found" containerID="75b9102a45bd7348ee8155d1c3b59cc40301c88e77fb8f3ce1855a075666bb90" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 05 06:15:19 crc kubenswrapper[4742]: E1205 06:15:18.932560 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 75b9102a45bd7348ee8155d1c3b59cc40301c88e77fb8f3ce1855a075666bb90 is running failed: container process not found" containerID="75b9102a45bd7348ee8155d1c3b59cc40301c88e77fb8f3ce1855a075666bb90" cmd=["/usr/bin/pidof","ovsdb-server"] Dec 05 06:15:19 crc kubenswrapper[4742]: E1205 06:15:18.933919 4742 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 75b9102a45bd7348ee8155d1c3b59cc40301c88e77fb8f3ce1855a075666bb90 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-sb-0" podUID="b8e993d8-0221-4214-b00a-ca745e716bbe" containerName="ovsdbserver-sb" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.936434 4742 generic.go:334] "Generic (PLEG): container finished" podID="b5df8784-b63d-41b7-a542-dcf53ea6cc5e" containerID="54edd9b1ddd6e1ba491286ac963077f358b82344ad584ecba6ecb84b4f7da42c" exitCode=0 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.941657 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9n84z" event={"ID":"b5df8784-b63d-41b7-a542-dcf53ea6cc5e","Type":"ContainerDied","Data":"54edd9b1ddd6e1ba491286ac963077f358b82344ad584ecba6ecb84b4f7da42c"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.967261 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell12c4b-account-delete-hgg9m"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.994976 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b8e993d8-0221-4214-b00a-ca745e716bbe/ovsdbserver-sb/0.log" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.995099 4742 generic.go:334] "Generic (PLEG): container finished" podID="b8e993d8-0221-4214-b00a-ca745e716bbe" containerID="3fa7e542679f95885cdd8ab99d9224e870ebcce54df90ffe1a480a639c7703e9" exitCode=2 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.995123 4742 generic.go:334] "Generic (PLEG): container finished" podID="b8e993d8-0221-4214-b00a-ca745e716bbe" containerID="75b9102a45bd7348ee8155d1c3b59cc40301c88e77fb8f3ce1855a075666bb90" exitCode=143 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.995242 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b8e993d8-0221-4214-b00a-ca745e716bbe","Type":"ContainerDied","Data":"3fa7e542679f95885cdd8ab99d9224e870ebcce54df90ffe1a480a639c7703e9"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:18.995273 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"b8e993d8-0221-4214-b00a-ca745e716bbe","Type":"ContainerDied","Data":"75b9102a45bd7348ee8155d1c3b59cc40301c88e77fb8f3ce1855a075666bb90"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.002452 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovs-vswitchd" containerID="cri-o://2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" gracePeriod=28 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.004532 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-66f7f988b5-b5pzf"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.005220 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-66f7f988b5-b5pzf" podUID="fa30e851-f383-42c0-9e09-d8c896ed77ad" containerName="barbican-worker-log" containerID="cri-o://9859f27d8f4bb1f62c35ce8e77ebd26f8aca7e99762a5eb2b0f4ad252e8f2430" gracePeriod=30 Dec 05 06:15:19 crc kubenswrapper[4742]: E1205 06:15:19.005263 4742 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Dec 05 06:15:19 crc kubenswrapper[4742]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 05 06:15:19 crc kubenswrapper[4742]: + source /usr/local/bin/container-scripts/functions Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNBridge=br-int Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNRemote=tcp:localhost:6642 Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNEncapType=geneve Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNAvailabilityZones= Dec 05 06:15:19 crc kubenswrapper[4742]: ++ EnableChassisAsGateway=true Dec 05 06:15:19 crc kubenswrapper[4742]: ++ PhysicalNetworks= Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNHostName= Dec 05 06:15:19 crc kubenswrapper[4742]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 05 06:15:19 crc kubenswrapper[4742]: ++ ovs_dir=/var/lib/openvswitch Dec 05 06:15:19 crc kubenswrapper[4742]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 05 06:15:19 crc kubenswrapper[4742]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 05 06:15:19 crc kubenswrapper[4742]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + sleep 0.5 Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + sleep 0.5 Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + sleep 0.5 Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + sleep 0.5 Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + cleanup_ovsdb_server_semaphore Dec 05 06:15:19 crc kubenswrapper[4742]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 05 06:15:19 crc kubenswrapper[4742]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 05 06:15:19 crc kubenswrapper[4742]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-tgnp6" message=< Dec 05 06:15:19 crc kubenswrapper[4742]: Exiting ovsdb-server (5) [ OK ] Dec 05 06:15:19 crc kubenswrapper[4742]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 05 06:15:19 crc kubenswrapper[4742]: + source /usr/local/bin/container-scripts/functions Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNBridge=br-int Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNRemote=tcp:localhost:6642 Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNEncapType=geneve Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNAvailabilityZones= Dec 05 06:15:19 crc kubenswrapper[4742]: ++ EnableChassisAsGateway=true Dec 05 06:15:19 crc kubenswrapper[4742]: ++ PhysicalNetworks= Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNHostName= Dec 05 06:15:19 crc kubenswrapper[4742]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 05 06:15:19 crc kubenswrapper[4742]: ++ ovs_dir=/var/lib/openvswitch Dec 05 06:15:19 crc kubenswrapper[4742]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 05 06:15:19 crc kubenswrapper[4742]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 05 06:15:19 crc kubenswrapper[4742]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + sleep 0.5 Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + sleep 0.5 Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + sleep 0.5 Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + sleep 0.5 Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + cleanup_ovsdb_server_semaphore Dec 05 06:15:19 crc kubenswrapper[4742]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 05 06:15:19 crc kubenswrapper[4742]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 05 06:15:19 crc kubenswrapper[4742]: > Dec 05 06:15:19 crc kubenswrapper[4742]: E1205 06:15:19.005297 4742 kuberuntime_container.go:691] "PreStop hook failed" err=< Dec 05 06:15:19 crc kubenswrapper[4742]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Dec 05 06:15:19 crc kubenswrapper[4742]: + source /usr/local/bin/container-scripts/functions Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNBridge=br-int Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNRemote=tcp:localhost:6642 Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNEncapType=geneve Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNAvailabilityZones= Dec 05 06:15:19 crc kubenswrapper[4742]: ++ EnableChassisAsGateway=true Dec 05 06:15:19 crc kubenswrapper[4742]: ++ PhysicalNetworks= Dec 05 06:15:19 crc kubenswrapper[4742]: ++ OVNHostName= Dec 05 06:15:19 crc kubenswrapper[4742]: ++ DB_FILE=/etc/openvswitch/conf.db Dec 05 06:15:19 crc kubenswrapper[4742]: ++ ovs_dir=/var/lib/openvswitch Dec 05 06:15:19 crc kubenswrapper[4742]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Dec 05 06:15:19 crc kubenswrapper[4742]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Dec 05 06:15:19 crc kubenswrapper[4742]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + sleep 0.5 Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + sleep 0.5 Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + sleep 0.5 Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + sleep 0.5 Dec 05 06:15:19 crc kubenswrapper[4742]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Dec 05 06:15:19 crc kubenswrapper[4742]: + cleanup_ovsdb_server_semaphore Dec 05 06:15:19 crc kubenswrapper[4742]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Dec 05 06:15:19 crc kubenswrapper[4742]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Dec 05 06:15:19 crc kubenswrapper[4742]: > pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovsdb-server" containerID="cri-o://56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.005337 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovsdb-server" containerID="cri-o://56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" gracePeriod=28 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.005413 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-66f7f988b5-b5pzf" podUID="fa30e851-f383-42c0-9e09-d8c896ed77ad" containerName="barbican-worker" containerID="cri-o://c6a468d9df31190dc440d8ef2e011bdc021d2f9b7a5c87349652c7adab9aea44" gracePeriod=30 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.017661 4742 generic.go:334] "Generic (PLEG): container finished" podID="8485e5ca-5372-441f-9e02-3df086991b2c" containerID="3aa39b731afc81b18e6dc9bcdfc9e62825bea59d828add91802d92234f28b7ac" exitCode=0 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.017720 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752" event={"ID":"8485e5ca-5372-441f-9e02-3df086991b2c","Type":"ContainerDied","Data":"3aa39b731afc81b18e6dc9bcdfc9e62825bea59d828add91802d92234f28b7ac"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.019004 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderedde-account-delete-pwsvp" event={"ID":"6a5ca1f6-73b0-43da-82c6-995495666585","Type":"ContainerStarted","Data":"d1d24811eef2a778824a8cac36c1ca4c8307de8f31b1b54e816c83914dee86d8"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.060377 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.060562 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="338b9928-12cd-4db4-806e-4f42612c5ab6" containerName="nova-cell0-conductor-conductor" containerID="cri-o://85c2b0d2bbfb8b6e3c396234c6a2e7332b515e33d6f3309f0bbf9466f03f62a0" gracePeriod=30 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.062909 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="83482a05302b7d016da2098260530a40bbabcc0dd30bbee8e001d56649d1fa10" exitCode=0 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.062930 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="583bc9aead517370de5511bc87f2ce12fd00f5d749568164aaf7dd9550bed55d" exitCode=0 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.062936 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="c8e35cc4fdc899da4c083432abcbef5e1ad92a9b95d3984cab5952bb33e1b375" exitCode=0
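
[Note] The xtrace dumped by the "Exec lifecycle hook for Container in Pod failed" / "PreStop hook failed" entries above is enough to reconstruct the shape of /usr/local/bin/container-scripts/stop-ovsdb-server.sh: it sources a functions file, polls for a semaphore file that marks it safe to stop the database server, then removes the semaphore and stops ovsdb-server alone, leaving ovs-vswitchd to its own hook. A reconstruction under those assumptions follows; the loop bound and the body of cleanup_ovsdb_server_semaphore are inferred from the trace, not quoted from the image:

    #!/bin/bash
    # Reconstructed from the PreStop xtrace above (paths and the ovs-ctl
    # invocation are verbatim; the loop bound and function body are inferred).
    set -x
    SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server

    # In the real script this function comes from "$(dirname "$0")/functions".
    cleanup_ovsdb_server_semaphore() {
        rm -f "$SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE"
    }

    # Bounded wait for ovs-vswitchd's shutdown path to drop the semaphore.
    for _ in $(seq 1 10); do
        [ -f "$SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE" ] && break
        sleep 0.5
    done

    cleanup_ovsdb_server_semaphore
    /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd

The 137 in the first entry is 128+9, consistent with the hook's shell being SIGKILLed as its container was torn down, even though the captured message shows ovs-ctl had already reported "Exiting ovsdb-server (5) [ OK ]".
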
Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.062945 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="76a0ea55014e165c32a9608cf728f086e8082a97b97de1dd0fdb8cb27caa0a7e" exitCode=0 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.062954 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="10dd23759afdac3e2bd7b9f5ad1e8df111f57ac5da85f46f8da24ff04f9269b3" exitCode=0 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.062962 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="a0061dd4be94377433ec372b482fc15e1700f814f12276d54dcb9692d64d5aab" exitCode=0 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.062970 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="0eaa57a7d1edb43a21fc4813afcd8bd8362171ef4a64691cceb98492dc6baccb" exitCode=0 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.062976 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="f326ad43ebacf85ccc332a8b35e3c1fcca45c28c2a4b229df2457a4983e5ac64" exitCode=0 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.062983 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="a572a48ba1392fc5a8267bbf98db108fc184bd4381327a56c58c1ab6e32e931f" exitCode=0 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.062992 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="c5fb081f824bef8d38c8af954df8461ee3d95ecdcf24abe5f14ab12b9b79eaaf" exitCode=0 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.063033 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"83482a05302b7d016da2098260530a40bbabcc0dd30bbee8e001d56649d1fa10"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.063066 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"583bc9aead517370de5511bc87f2ce12fd00f5d749568164aaf7dd9550bed55d"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.063127 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"c8e35cc4fdc899da4c083432abcbef5e1ad92a9b95d3984cab5952bb33e1b375"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.063171 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"76a0ea55014e165c32a9608cf728f086e8082a97b97de1dd0fdb8cb27caa0a7e"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.063181 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"10dd23759afdac3e2bd7b9f5ad1e8df111f57ac5da85f46f8da24ff04f9269b3"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.063234 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"a0061dd4be94377433ec372b482fc15e1700f814f12276d54dcb9692d64d5aab"} Dec 05 06:15:19 crc
kubenswrapper[4742]: I1205 06:15:19.063246 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"0eaa57a7d1edb43a21fc4813afcd8bd8362171ef4a64691cceb98492dc6baccb"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.063257 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"f326ad43ebacf85ccc332a8b35e3c1fcca45c28c2a4b229df2457a4983e5ac64"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.063267 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"a572a48ba1392fc5a8267bbf98db108fc184bd4381327a56c58c1ab6e32e931f"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.063275 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"c5fb081f824bef8d38c8af954df8461ee3d95ecdcf24abe5f14ab12b9b79eaaf"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.065496 4742 generic.go:334] "Generic (PLEG): container finished" podID="e3428207-2cb4-47d8-b4d8-941c3a4928fb" containerID="c578f580ac4c94f28399a0f7e39da62ca4fb8496169c7001d2053863082caf1f" exitCode=143 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.065600 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e3428207-2cb4-47d8-b4d8-941c3a4928fb","Type":"ContainerDied","Data":"c578f580ac4c94f28399a0f7e39da62ca4fb8496169c7001d2053863082caf1f"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.066996 4742 generic.go:334] "Generic (PLEG): container finished" podID="e6063f78-1b45-493e-ae25-62239a1ed5e3" containerID="873f59d3fccd6e3aebb3bd5b7bfff039e46a5ac7aa542e462b2ded2d505ccf92" exitCode=143 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.067029 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e6063f78-1b45-493e-ae25-62239a1ed5e3","Type":"ContainerDied","Data":"873f59d3fccd6e3aebb3bd5b7bfff039e46a5ac7aa542e462b2ded2d505ccf92"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.096557 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5b594b6ccb-vbxpj"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.096797 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5b594b6ccb-vbxpj" podUID="e9974486-076d-4493-af32-a08eef334572" containerName="barbican-api-log" containerID="cri-o://c58c31d14e6bc541855c1db0c9f365ab77a7e68becb2933d6d951d6d108a2537" gracePeriod=30 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.097226 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5b594b6ccb-vbxpj" podUID="e9974486-076d-4493-af32-a08eef334572" containerName="barbican-api" containerID="cri-o://1d507c229540319f85af5a5bb49cd7bea47d3c4c4e80bec322f63230f391811c" gracePeriod=30 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.105240 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_a88c6674-8c2f-4868-8839-1ec313fbfe8e/ovsdbserver-nb/0.log" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.105303 4742 generic.go:334] "Generic (PLEG): container finished" 
podID="a88c6674-8c2f-4868-8839-1ec313fbfe8e" containerID="1c678ad593b1fd73c0db215db0715772580dca8117af0aa42f0c6d499e00b732" exitCode=2 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.105320 4742 generic.go:334] "Generic (PLEG): container finished" podID="a88c6674-8c2f-4868-8839-1ec313fbfe8e" containerID="9d256f9bac05a4b6bb691eed52a6f2da591190f33538fc2e7323132010170272" exitCode=143 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.105411 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"a88c6674-8c2f-4868-8839-1ec313fbfe8e","Type":"ContainerDied","Data":"1c678ad593b1fd73c0db215db0715772580dca8117af0aa42f0c6d499e00b732"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.105438 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"a88c6674-8c2f-4868-8839-1ec313fbfe8e","Type":"ContainerDied","Data":"9d256f9bac05a4b6bb691eed52a6f2da591190f33538fc2e7323132010170272"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.119163 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-t2p4w"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.123689 4742 generic.go:334] "Generic (PLEG): container finished" podID="7f1e3dac-5031-4dfe-815c-1c1b447f0d64" containerID="0b24c5bc7a890ac19ac54b2ce282a48fb159a89cb8f1121185c3c1f4ebc77ba4" exitCode=137 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.138525 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="85632fad-1ab6-495e-9049-6b5dad9cc955" containerName="galera" containerID="cri-o://43f59b0dd0673acbf1e8b1cb19d732574d31c64af500c88a90c6c1409d92d526" gracePeriod=30 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.153821 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-t2p4w"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.156989 4742 generic.go:334] "Generic (PLEG): container finished" podID="d7a764d5-447f-483d-b819-0e398e749600" containerID="f455df6d411179859e60d3c9b127100c03c9bd439f8c01b9bb223b4b2bbfd0d5" exitCode=143 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.157097 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d7a764d5-447f-483d-b819-0e398e749600","Type":"ContainerDied","Data":"f455df6d411179859e60d3c9b127100c03c9bd439f8c01b9bb223b4b2bbfd0d5"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.159397 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b"} Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.185168 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.185441 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="a1b7e898-ff4e-4523-8602-18d5937c3e5f" containerName="nova-cell1-conductor-conductor" containerID="cri-o://37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e" gracePeriod=30 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.214329 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-q5mkg"] Dec 
05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.237244 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.251692 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-q5mkg"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.284978 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.285206 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="e07b564a-eb31-4f88-ae69-44cceef519a4" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://e3d277fe25b44b8eabf49e249cf506a11614da370c828683e2d2b01d444716d4" gracePeriod=30 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.301991 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.302205 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="931816fd-7570-46ac-b555-368b196b030c" containerName="nova-scheduler-scheduler" containerID="cri-o://19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee" gracePeriod=30 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.456369 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" containerName="rabbitmq" containerID="cri-o://9049700e89ccd644394c6fda74ff3a49949e7dbd626334fac51902707979e0d4" gracePeriod=604800 Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.556177 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron5be2-account-delete-l9sfl"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.681568 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9n84z" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.695757 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.707629 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-whtm9"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.714681 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-whtm9"] Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.721610 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b8e993d8-0221-4214-b00a-ca745e716bbe/ovsdbserver-sb/0.log" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.721670 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
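
[Note] Worth flagging in the "Killing container with a grace period" entries: most service containers here get gracePeriod=30, the ovn-controller-ovs containers show 28 (on one reading, roughly two seconds of their window had already been consumed by the PreStop hook activity above), and both rabbitmq pods get gracePeriod=604800, i.e. a full week, presumably so the broker can drain and shut down cleanly rather than be killed mid-handoff. The arithmetic, for the record:

    # rabbitmq's gracePeriod is exactly seven days, vs. 30s for the API pods:
    echo $((7 * 24 * 3600))    # 604800
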
Need to start a new one" pod="openstack/openstackclient" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805532 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97db6\" (UniqueName: \"kubernetes.io/projected/8485e5ca-5372-441f-9e02-3df086991b2c-kube-api-access-97db6\") pod \"8485e5ca-5372-441f-9e02-3df086991b2c\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805582 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"b8e993d8-0221-4214-b00a-ca745e716bbe\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805648 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b8e993d8-0221-4214-b00a-ca745e716bbe-ovsdb-rundir\") pod \"b8e993d8-0221-4214-b00a-ca745e716bbe\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805670 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8e993d8-0221-4214-b00a-ca745e716bbe-scripts\") pod \"b8e993d8-0221-4214-b00a-ca745e716bbe\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805694 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-config\") pod \"8485e5ca-5372-441f-9e02-3df086991b2c\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805716 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-ovsdbserver-sb\") pod \"8485e5ca-5372-441f-9e02-3df086991b2c\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805743 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-dns-svc\") pod \"8485e5ca-5372-441f-9e02-3df086991b2c\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805759 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-combined-ca-bundle\") pod \"b8e993d8-0221-4214-b00a-ca745e716bbe\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805774 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8e993d8-0221-4214-b00a-ca745e716bbe-config\") pod \"b8e993d8-0221-4214-b00a-ca745e716bbe\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805814 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-dns-swift-storage-0\") pod \"8485e5ca-5372-441f-9e02-3df086991b2c\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " Dec 05 
06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805833 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-run-ovn\") pod \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805896 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-log-ovn\") pod \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805911 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7scpt\" (UniqueName: \"kubernetes.io/projected/b8e993d8-0221-4214-b00a-ca745e716bbe-kube-api-access-7scpt\") pod \"b8e993d8-0221-4214-b00a-ca745e716bbe\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805964 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-metrics-certs-tls-certs\") pod \"b8e993d8-0221-4214-b00a-ca745e716bbe\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.805983 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-ovsdbserver-sb-tls-certs\") pod \"b8e993d8-0221-4214-b00a-ca745e716bbe\" (UID: \"b8e993d8-0221-4214-b00a-ca745e716bbe\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.806000 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-combined-ca-bundle\") pod \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.806023 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-ovsdbserver-nb\") pod \"8485e5ca-5372-441f-9e02-3df086991b2c\" (UID: \"8485e5ca-5372-441f-9e02-3df086991b2c\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.806077 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-ovn-controller-tls-certs\") pod \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.806099 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-run\") pod \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.806127 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gg5h\" (UniqueName: \"kubernetes.io/projected/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-kube-api-access-7gg5h\") pod \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\" (UID: 
\"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.806143 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-scripts\") pod \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\" (UID: \"b5df8784-b63d-41b7-a542-dcf53ea6cc5e\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.811501 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-scripts" (OuterVolumeSpecName: "scripts") pod "b5df8784-b63d-41b7-a542-dcf53ea6cc5e" (UID: "b5df8784-b63d-41b7-a542-dcf53ea6cc5e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.813447 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-run" (OuterVolumeSpecName: "var-run") pod "b5df8784-b63d-41b7-a542-dcf53ea6cc5e" (UID: "b5df8784-b63d-41b7-a542-dcf53ea6cc5e"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.815076 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "b5df8784-b63d-41b7-a542-dcf53ea6cc5e" (UID: "b5df8784-b63d-41b7-a542-dcf53ea6cc5e"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.816165 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "b5df8784-b63d-41b7-a542-dcf53ea6cc5e" (UID: "b5df8784-b63d-41b7-a542-dcf53ea6cc5e"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.820383 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8e993d8-0221-4214-b00a-ca745e716bbe-config" (OuterVolumeSpecName: "config") pod "b8e993d8-0221-4214-b00a-ca745e716bbe" (UID: "b8e993d8-0221-4214-b00a-ca745e716bbe"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.837741 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8e993d8-0221-4214-b00a-ca745e716bbe-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "b8e993d8-0221-4214-b00a-ca745e716bbe" (UID: "b8e993d8-0221-4214-b00a-ca745e716bbe"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.838178 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8e993d8-0221-4214-b00a-ca745e716bbe-scripts" (OuterVolumeSpecName: "scripts") pod "b8e993d8-0221-4214-b00a-ca745e716bbe" (UID: "b8e993d8-0221-4214-b00a-ca745e716bbe"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.849652 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "b8e993d8-0221-4214-b00a-ca745e716bbe" (UID: "b8e993d8-0221-4214-b00a-ca745e716bbe"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.859423 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8485e5ca-5372-441f-9e02-3df086991b2c-kube-api-access-97db6" (OuterVolumeSpecName: "kube-api-access-97db6") pod "8485e5ca-5372-441f-9e02-3df086991b2c" (UID: "8485e5ca-5372-441f-9e02-3df086991b2c"). InnerVolumeSpecName "kube-api-access-97db6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.859521 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8e993d8-0221-4214-b00a-ca745e716bbe-kube-api-access-7scpt" (OuterVolumeSpecName: "kube-api-access-7scpt") pod "b8e993d8-0221-4214-b00a-ca745e716bbe" (UID: "b8e993d8-0221-4214-b00a-ca745e716bbe"). InnerVolumeSpecName "kube-api-access-7scpt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.885725 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-kube-api-access-7gg5h" (OuterVolumeSpecName: "kube-api-access-7gg5h") pod "b5df8784-b63d-41b7-a542-dcf53ea6cc5e" (UID: "b5df8784-b63d-41b7-a542-dcf53ea6cc5e"). InnerVolumeSpecName "kube-api-access-7gg5h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.907708 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-openstack-config-secret\") pod \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.908309 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-openstack-config\") pod \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.908511 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-combined-ca-bundle\") pod \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.908712 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brmjs\" (UniqueName: \"kubernetes.io/projected/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-kube-api-access-brmjs\") pod \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\" (UID: \"7f1e3dac-5031-4dfe-815c-1c1b447f0d64\") " Dec 05 06:15:19 crc kubenswrapper[4742]: E1205 06:15:19.909351 4742 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Dec 05 06:15:19 crc kubenswrapper[4742]: E1205 06:15:19.909436 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/42c1f939-2d9c-4a8d-a341-cbce22551d58-operator-scripts podName:42c1f939-2d9c-4a8d-a341-cbce22551d58 nodeName:}" failed. No retries permitted until 2025-12-05 06:15:21.909419994 +0000 UTC m=+1397.821555056 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/42c1f939-2d9c-4a8d-a341-cbce22551d58-operator-scripts") pod "novacell12c4b-account-delete-hgg9m" (UID: "42c1f939-2d9c-4a8d-a341-cbce22551d58") : configmap "openstack-cell1-scripts" not found Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.910313 4742 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.910579 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.910671 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gg5h\" (UniqueName: \"kubernetes.io/projected/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-kube-api-access-7gg5h\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.910758 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97db6\" (UniqueName: \"kubernetes.io/projected/8485e5ca-5372-441f-9e02-3df086991b2c-kube-api-access-97db6\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.910877 4742 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.910984 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b8e993d8-0221-4214-b00a-ca745e716bbe-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.911111 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b8e993d8-0221-4214-b00a-ca745e716bbe-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.911233 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8e993d8-0221-4214-b00a-ca745e716bbe-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.911313 4742 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.911384 4742 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.911468 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7scpt\" (UniqueName: \"kubernetes.io/projected/b8e993d8-0221-4214-b00a-ca745e716bbe-kube-api-access-7scpt\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:19 crc kubenswrapper[4742]: I1205 06:15:19.915976 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-kube-api-access-brmjs" (OuterVolumeSpecName: "kube-api-access-brmjs") pod "7f1e3dac-5031-4dfe-815c-1c1b447f0d64" (UID: "7f1e3dac-5031-4dfe-815c-1c1b447f0d64"). 
InnerVolumeSpecName "kube-api-access-brmjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.012286 4742 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.015476 4742 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.016238 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brmjs\" (UniqueName: \"kubernetes.io/projected/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-kube-api-access-brmjs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.024816 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8485e5ca-5372-441f-9e02-3df086991b2c" (UID: "8485e5ca-5372-441f-9e02-3df086991b2c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.054267 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8e993d8-0221-4214-b00a-ca745e716bbe" (UID: "b8e993d8-0221-4214-b00a-ca745e716bbe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.080422 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_a88c6674-8c2f-4868-8839-1ec313fbfe8e/ovsdbserver-nb/0.log" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.080529 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.119239 4742 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.119285 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.122439 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "7f1e3dac-5031-4dfe-815c-1c1b447f0d64" (UID: "7f1e3dac-5031-4dfe-815c-1c1b447f0d64"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.136489 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b5df8784-b63d-41b7-a542-dcf53ea6cc5e" (UID: "b5df8784-b63d-41b7-a542-dcf53ea6cc5e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.180541 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-config" (OuterVolumeSpecName: "config") pod "8485e5ca-5372-441f-9e02-3df086991b2c" (UID: "8485e5ca-5372-441f-9e02-3df086991b2c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.184824 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8485e5ca-5372-441f-9e02-3df086991b2c" (UID: "8485e5ca-5372-441f-9e02-3df086991b2c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.187349 4742 generic.go:334] "Generic (PLEG): container finished" podID="e07b564a-eb31-4f88-ae69-44cceef519a4" containerID="e3d277fe25b44b8eabf49e249cf506a11614da370c828683e2d2b01d444716d4" exitCode=0 Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.187413 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e07b564a-eb31-4f88-ae69-44cceef519a4","Type":"ContainerDied","Data":"e3d277fe25b44b8eabf49e249cf506a11614da370c828683e2d2b01d444716d4"} Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.200091 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8485e5ca-5372-441f-9e02-3df086991b2c" (UID: "8485e5ca-5372-441f-9e02-3df086991b2c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.223076 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_a88c6674-8c2f-4868-8839-1ec313fbfe8e/ovsdbserver-nb/0.log" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.223149 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"a88c6674-8c2f-4868-8839-1ec313fbfe8e","Type":"ContainerDied","Data":"0bc73304c447d34eca790cf093a15dc151dbf2bae8378b7b5a87aaf6245d1d9f"} Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.223187 4742 scope.go:117] "RemoveContainer" containerID="1c678ad593b1fd73c0db215db0715772580dca8117af0aa42f0c6d499e00b732" Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.223280 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.223770 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgpz8\" (UniqueName: \"kubernetes.io/projected/a88c6674-8c2f-4868-8839-1ec313fbfe8e-kube-api-access-fgpz8\") pod \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") "
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.223827 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-ovsdbserver-nb-tls-certs\") pod \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") "
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.223916 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a88c6674-8c2f-4868-8839-1ec313fbfe8e-ovsdb-rundir\") pod \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") "
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.224021 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a88c6674-8c2f-4868-8839-1ec313fbfe8e-config\") pod \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") "
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.226238 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-combined-ca-bundle\") pod \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") "
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.226277 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") "
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.226486 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a88c6674-8c2f-4868-8839-1ec313fbfe8e-scripts\") pod \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") "
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.226625 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-metrics-certs-tls-certs\") pod \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\" (UID: \"a88c6674-8c2f-4868-8839-1ec313fbfe8e\") "
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.227240 4742 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-openstack-config\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.227263 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.227273 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.227286 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-config\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.227298 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.227663 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a88c6674-8c2f-4868-8839-1ec313fbfe8e-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "a88c6674-8c2f-4868-8839-1ec313fbfe8e" (UID: "a88c6674-8c2f-4868-8839-1ec313fbfe8e"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.228558 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a88c6674-8c2f-4868-8839-1ec313fbfe8e-scripts" (OuterVolumeSpecName: "scripts") pod "a88c6674-8c2f-4868-8839-1ec313fbfe8e" (UID: "a88c6674-8c2f-4868-8839-1ec313fbfe8e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.228704 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a88c6674-8c2f-4868-8839-1ec313fbfe8e-config" (OuterVolumeSpecName: "config") pod "a88c6674-8c2f-4868-8839-1ec313fbfe8e" (UID: "a88c6674-8c2f-4868-8839-1ec313fbfe8e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.229256 4742 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.229306 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data podName:7b5d8165-e06e-4600-9cab-9cf84c010725 nodeName:}" failed. No retries permitted until 2025-12-05 06:15:24.229287622 +0000 UTC m=+1400.141422684 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data") pod "rabbitmq-cell1-server-0" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725") : configmap "rabbitmq-cell1-config-data" not found
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.236466 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a88c6674-8c2f-4868-8839-1ec313fbfe8e-kube-api-access-fgpz8" (OuterVolumeSpecName: "kube-api-access-fgpz8") pod "a88c6674-8c2f-4868-8839-1ec313fbfe8e" (UID: "a88c6674-8c2f-4868-8839-1ec313fbfe8e"). InnerVolumeSpecName "kube-api-access-fgpz8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.245634 4742 generic.go:334] "Generic (PLEG): container finished" podID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" containerID="5786613a0ea271c49e36cb812feba2d04a12fd32ef5d2c4e0ebfce2f557616b6" exitCode=143
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.245730 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7038cd99-8151-4157-93c6-3b7f5b9ce25e","Type":"ContainerDied","Data":"5786613a0ea271c49e36cb812feba2d04a12fd32ef5d2c4e0ebfce2f557616b6"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.251273 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "a88c6674-8c2f-4868-8839-1ec313fbfe8e" (UID: "a88c6674-8c2f-4868-8839-1ec313fbfe8e"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.255272 4742 generic.go:334] "Generic (PLEG): container finished" podID="3b535626-d96c-4843-bc25-c4fafa967b23" containerID="a6798471637e201a3f0d2d87ce22e1f621bb66ed7382d07497c5dc7f71a7d869" exitCode=0
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.255326 4742 generic.go:334] "Generic (PLEG): container finished" podID="3b535626-d96c-4843-bc25-c4fafa967b23" containerID="6ee0e1f6ed8fc4033483315f49001dac70cdc9d56d231f0ed6e4bf14ed5391bf" exitCode=0
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.255394 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3b535626-d96c-4843-bc25-c4fafa967b23","Type":"ContainerDied","Data":"a6798471637e201a3f0d2d87ce22e1f621bb66ed7382d07497c5dc7f71a7d869"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.255421 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3b535626-d96c-4843-bc25-c4fafa967b23","Type":"ContainerDied","Data":"6ee0e1f6ed8fc4033483315f49001dac70cdc9d56d231f0ed6e4bf14ed5391bf"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.258000 4742 generic.go:334] "Generic (PLEG): container finished" podID="65eece87-4279-4d6c-b2a6-5841fd5b3298" containerID="c3a0400780874ea1469dfe2dbe4742008a4997fce008a566e72b6e1f3a0d759f" exitCode=0
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.258069 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-574f89688c-hbh7m" event={"ID":"65eece87-4279-4d6c-b2a6-5841fd5b3298","Type":"ContainerDied","Data":"c3a0400780874ea1469dfe2dbe4742008a4997fce008a566e72b6e1f3a0d759f"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.259393 4742 generic.go:334] "Generic (PLEG): container finished" podID="8ef1f42c-4004-49d9-9456-4d4df074004f" containerID="26e1bb24efd752b2c3019b0bfc4555cbdbfc083437d9055e783ed7089c77d920" exitCode=0
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.259414 4742 generic.go:334] "Generic (PLEG): container finished" podID="8ef1f42c-4004-49d9-9456-4d4df074004f" containerID="5b04102935046122a7f13426fa065fb74c80fe74f085c0c16a9aab6c2234ef7c" exitCode=0
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.259446 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" event={"ID":"8ef1f42c-4004-49d9-9456-4d4df074004f","Type":"ContainerDied","Data":"26e1bb24efd752b2c3019b0bfc4555cbdbfc083437d9055e783ed7089c77d920"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.259463 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" event={"ID":"8ef1f42c-4004-49d9-9456-4d4df074004f","Type":"ContainerDied","Data":"5b04102935046122a7f13426fa065fb74c80fe74f085c0c16a9aab6c2234ef7c"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.260619 4742 generic.go:334] "Generic (PLEG): container finished" podID="e42757b3-029e-4fe9-917f-73331394524e" containerID="23c49e25d43e79c7f8ade74991cb2aa015e0aae68fc08c3f8bd44099cbee5e4d" exitCode=143
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.260656 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-765b847d64-jgxg4" event={"ID":"e42757b3-029e-4fe9-917f-73331394524e","Type":"ContainerDied","Data":"23c49e25d43e79c7f8ade74991cb2aa015e0aae68fc08c3f8bd44099cbee5e4d"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.270127 4742 generic.go:334] "Generic (PLEG): container finished" podID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" exitCode=0
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.270196 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-tgnp6" event={"ID":"504b6b10-062b-4d3c-8202-fcfd97bc57aa","Type":"ContainerDied","Data":"56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.273286 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "7f1e3dac-5031-4dfe-815c-1c1b447f0d64" (UID: "7f1e3dac-5031-4dfe-815c-1c1b447f0d64"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.279876 4742 generic.go:334] "Generic (PLEG): container finished" podID="2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" containerID="3710b941ebb6d9eff059006febf49493e12500d9cc0dc124f356193d93849a0b" exitCode=143
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.280702 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" event={"ID":"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9","Type":"ContainerDied","Data":"3710b941ebb6d9eff059006febf49493e12500d9cc0dc124f356193d93849a0b"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.297332 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7f1e3dac-5031-4dfe-815c-1c1b447f0d64" (UID: "7f1e3dac-5031-4dfe-815c-1c1b447f0d64"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.314246 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8485e5ca-5372-441f-9e02-3df086991b2c" (UID: "8485e5ca-5372-441f-9e02-3df086991b2c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.329941 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.330148 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgpz8\" (UniqueName: \"kubernetes.io/projected/a88c6674-8c2f-4868-8839-1ec313fbfe8e-kube-api-access-fgpz8\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.330162 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a88c6674-8c2f-4868-8839-1ec313fbfe8e-ovsdb-rundir\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.330172 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a88c6674-8c2f-4868-8839-1ec313fbfe8e-config\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.330222 4742 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" "
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.330234 4742 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8485e5ca-5372-441f-9e02-3df086991b2c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.330243 4742 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7f1e3dac-5031-4dfe-815c-1c1b447f0d64-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.330253 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a88c6674-8c2f-4868-8839-1ec313fbfe8e-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.330498 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbicanf9da-account-delete-4bv5f"]
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.392480 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "b5df8784-b63d-41b7-a542-dcf53ea6cc5e" (UID: "b5df8784-b63d-41b7-a542-dcf53ea6cc5e"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.427464 4742 scope.go:117] "RemoveContainer" containerID="9d256f9bac05a4b6bb691eed52a6f2da591190f33538fc2e7323132010170272"
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.435030 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59e2f35a-a430-4187-b30a-43a8f1872d9f" path="/var/lib/kubelet/pods/59e2f35a-a430-4187-b30a-43a8f1872d9f/volumes"
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.455868 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.459113 4742 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5df8784-b63d-41b7-a542-dcf53ea6cc5e-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.459741 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.460205 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f1e3dac-5031-4dfe-815c-1c1b447f0d64" path="/var/lib/kubelet/pods/7f1e3dac-5031-4dfe-815c-1c1b447f0d64/volumes"
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.461245 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a88c6674-8c2f-4868-8839-1ec313fbfe8e" (UID: "a88c6674-8c2f-4868-8839-1ec313fbfe8e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.461370 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.461429 4742 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovsdb-server"
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.461435 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="402a519ada8012b4c837384aa46b5de9d5a53090de81a4bfd5fca5e66afd80ab" exitCode=0
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.461460 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="89f787d3dcbe6d0e4ec54aa9195f1fe45b50844797a04c3b326966f17201a671" exitCode=0
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.461469 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="9b48fcbbeeb0dfa6813285a8982e885fd781741008cdbdef2351e5277caa44d7" exitCode=0
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.461476 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="662fdbccb819aa757a5eacbc682c4c9d90ae7096d453acac04fbb8d2c2d724e9" exitCode=0
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.462330 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.463072 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a53c417d-f914-40f7-a7dd-47cafb2b6718" path="/var/lib/kubelet/pods/a53c417d-f914-40f7-a7dd-47cafb2b6718/volumes"
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.463628 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a998c383-44cc-4b30-a27b-57860fdd3353" path="/var/lib/kubelet/pods/a998c383-44cc-4b30-a27b-57860fdd3353/volumes"
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.464194 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebded868-aaf1-4294-bec1-ec504cdf1810" path="/var/lib/kubelet/pods/ebded868-aaf1-4294-bec1-ec504cdf1810/volumes"
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.469175 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe8a103e-e284-4ec2-b566-32a1180870c6" path="/var/lib/kubelet/pods/fe8a103e-e284-4ec2-b566-32a1180870c6/volumes"
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.470850 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.480633 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.480673 4742 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovs-vswitchd"
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.488682 4742 generic.go:334] "Generic (PLEG): container finished" podID="fa30e851-f383-42c0-9e09-d8c896ed77ad" containerID="9859f27d8f4bb1f62c35ce8e77ebd26f8aca7e99762a5eb2b0f4ad252e8f2430" exitCode=143
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.508338 4742 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc"
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.510001 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-9n84z"
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.516074 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.525675 4742 generic.go:334] "Generic (PLEG): container finished" podID="e9974486-076d-4493-af32-a08eef334572" containerID="c58c31d14e6bc541855c1db0c9f365ab77a7e68becb2933d6d951d6d108a2537" exitCode=143
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.538981 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_b8e993d8-0221-4214-b00a-ca745e716bbe/ovsdbserver-sb/0.log"
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.539486 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.550438 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.552381 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.554384 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752"
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.563679 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.563750 4742 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="a1b7e898-ff4e-4523-8602-18d5937c3e5f" containerName="nova-cell1-conductor-conductor"
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.577786 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.577848 4742 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.603381 4742 generic.go:334] "Generic (PLEG): container finished" podID="aa702931-d853-4f8b-b0d8-58f5476bb7c2" containerID="69ab6c82edf2ce10d8987c3b5a9194e318538298be58a196bccfa67b05fbaae7" exitCode=143
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.641578 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "b8e993d8-0221-4214-b00a-ca745e716bbe" (UID: "b8e993d8-0221-4214-b00a-ca745e716bbe"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.673969 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "b8e993d8-0221-4214-b00a-ca745e716bbe" (UID: "b8e993d8-0221-4214-b00a-ca745e716bbe"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.674231 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "a88c6674-8c2f-4868-8839-1ec313fbfe8e" (UID: "a88c6674-8c2f-4868-8839-1ec313fbfe8e"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.689236 4742 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.689276 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b8e993d8-0221-4214-b00a-ca745e716bbe-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.689287 4742 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.835555 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glancebc83-account-delete-wqtlb"]
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.836202 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement9800-account-delete-mlmb7"]
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.836312 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell12c4b-account-delete-hgg9m"]
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.836397 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"402a519ada8012b4c837384aa46b5de9d5a53090de81a4bfd5fca5e66afd80ab"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.836482 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"89f787d3dcbe6d0e4ec54aa9195f1fe45b50844797a04c3b326966f17201a671"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.836545 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"9b48fcbbeeb0dfa6813285a8982e885fd781741008cdbdef2351e5277caa44d7"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.836607 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"662fdbccb819aa757a5eacbc682c4c9d90ae7096d453acac04fbb8d2c2d724e9"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.836781 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-66f7f988b5-b5pzf" event={"ID":"fa30e851-f383-42c0-9e09-d8c896ed77ad","Type":"ContainerDied","Data":"9859f27d8f4bb1f62c35ce8e77ebd26f8aca7e99762a5eb2b0f4ad252e8f2430"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.836864 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapie0d5-account-delete-294ls"]
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.837099 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell036a3-account-delete-mmf6x"]
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.837203 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-9n84z" event={"ID":"b5df8784-b63d-41b7-a542-dcf53ea6cc5e","Type":"ContainerDied","Data":"f917511ece062ed88b054cee51df7c07882cb08aec93d7338f27131f1bc76bf5"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.837688 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b594b6ccb-vbxpj" event={"ID":"e9974486-076d-4493-af32-a08eef334572","Type":"ContainerDied","Data":"c58c31d14e6bc541855c1db0c9f365ab77a7e68becb2933d6d951d6d108a2537"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.839107 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"b8e993d8-0221-4214-b00a-ca745e716bbe","Type":"ContainerDied","Data":"3da10f38100c90d2e73c2c8207528c35dd5293060f371cb44777cbb35f8867fc"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.839359 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderedde-account-delete-pwsvp" event={"ID":"6a5ca1f6-73b0-43da-82c6-995495666585","Type":"ContainerStarted","Data":"ff2c23489eeb12a736b3cfe4f0639cf009f56c7dbb016087059c4fd472b82f8d"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.839666 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-kb752" event={"ID":"8485e5ca-5372-441f-9e02-3df086991b2c","Type":"ContainerDied","Data":"7d3c13ea158c40cc1383e521e2ecf77c4f7b88c285b0644cf06dac0450540cee"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.839870 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron5be2-account-delete-l9sfl" event={"ID":"4f2ab762-07a0-426d-a84a-a53ad7e2fef0","Type":"ContainerStarted","Data":"6d886ffde936b3bedb2d5ad14181164ead61f68ee3c2274575a845e381579ea6"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.840017 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aa702931-d853-4f8b-b0d8-58f5476bb7c2","Type":"ContainerDied","Data":"69ab6c82edf2ce10d8987c3b5a9194e318538298be58a196bccfa67b05fbaae7"}
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.876774 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "a88c6674-8c2f-4868-8839-1ec313fbfe8e" (UID: "a88c6674-8c2f-4868-8839-1ec313fbfe8e"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.888877 4742 scope.go:117] "RemoveContainer" containerID="54edd9b1ddd6e1ba491286ac963077f358b82344ad584ecba6ecb84b4f7da42c"
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.895170 4742 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a88c6674-8c2f-4868-8839-1ec313fbfe8e-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.895362 4742 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Dec 05 06:15:20 crc kubenswrapper[4742]: E1205 06:15:20.895420 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data podName:d6b096f4-483e-48c5-a3e1-a178c0c5ae6e nodeName:}" failed. No retries permitted until 2025-12-05 06:15:24.895406085 +0000 UTC m=+1400.807541147 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data") pod "rabbitmq-server-0" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e") : configmap "rabbitmq-config-data" not found
Dec 05 06:15:20 crc kubenswrapper[4742]: I1205 06:15:20.987297 4742 scope.go:117] "RemoveContainer" containerID="0b24c5bc7a890ac19ac54b2ce282a48fb159a89cb8f1121185c3c1f4ebc77ba4"
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.019141 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.029510 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5f7476cfc7-5r2mm"
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.046787 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-kb752"]
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.062047 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-kb752"]
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.071478 4742 scope.go:117] "RemoveContainer" containerID="3fa7e542679f95885cdd8ab99d9224e870ebcce54df90ffe1a480a639c7703e9"
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.105290 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8ef1f42c-4004-49d9-9456-4d4df074004f-etc-swift\") pod \"8ef1f42c-4004-49d9-9456-4d4df074004f\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") "
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.105379 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pgq5\" (UniqueName: \"kubernetes.io/projected/3b535626-d96c-4843-bc25-c4fafa967b23-kube-api-access-5pgq5\") pod \"3b535626-d96c-4843-bc25-c4fafa967b23\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") "
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.105422 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwsbx\" (UniqueName: \"kubernetes.io/projected/8ef1f42c-4004-49d9-9456-4d4df074004f-kube-api-access-zwsbx\") pod \"8ef1f42c-4004-49d9-9456-4d4df074004f\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") "
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.105460 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-scripts\") pod \"3b535626-d96c-4843-bc25-c4fafa967b23\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") "
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.105483 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-combined-ca-bundle\") pod \"8ef1f42c-4004-49d9-9456-4d4df074004f\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") "
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.105510 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef1f42c-4004-49d9-9456-4d4df074004f-log-httpd\") pod \"8ef1f42c-4004-49d9-9456-4d4df074004f\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") "
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.105527 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b535626-d96c-4843-bc25-c4fafa967b23-etc-machine-id\") pod \"3b535626-d96c-4843-bc25-c4fafa967b23\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") "
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.105569 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef1f42c-4004-49d9-9456-4d4df074004f-run-httpd\") pod \"8ef1f42c-4004-49d9-9456-4d4df074004f\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") "
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.105629 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-config-data\") pod \"3b535626-d96c-4843-bc25-c4fafa967b23\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") "
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.105649 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-combined-ca-bundle\") pod \"3b535626-d96c-4843-bc25-c4fafa967b23\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") "
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.105670 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-config-data-custom\") pod \"3b535626-d96c-4843-bc25-c4fafa967b23\" (UID: \"3b535626-d96c-4843-bc25-c4fafa967b23\") "
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.105705 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-internal-tls-certs\") pod \"8ef1f42c-4004-49d9-9456-4d4df074004f\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") "
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.105726 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-config-data\") pod \"8ef1f42c-4004-49d9-9456-4d4df074004f\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") "
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.105749 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-public-tls-certs\") pod \"8ef1f42c-4004-49d9-9456-4d4df074004f\" (UID: \"8ef1f42c-4004-49d9-9456-4d4df074004f\") "
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.107175 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ef1f42c-4004-49d9-9456-4d4df074004f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8ef1f42c-4004-49d9-9456-4d4df074004f" (UID: "8ef1f42c-4004-49d9-9456-4d4df074004f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.110628 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ef1f42c-4004-49d9-9456-4d4df074004f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8ef1f42c-4004-49d9-9456-4d4df074004f" (UID: "8ef1f42c-4004-49d9-9456-4d4df074004f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.111217 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3b535626-d96c-4843-bc25-c4fafa967b23-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "3b535626-d96c-4843-bc25-c4fafa967b23" (UID: "3b535626-d96c-4843-bc25-c4fafa967b23"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.188541 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-scripts" (OuterVolumeSpecName: "scripts") pod "3b535626-d96c-4843-bc25-c4fafa967b23" (UID: "3b535626-d96c-4843-bc25-c4fafa967b23"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.209976 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.210006 4742 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef1f42c-4004-49d9-9456-4d4df074004f-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.210035 4742 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3b535626-d96c-4843-bc25-c4fafa967b23-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.210047 4742 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8ef1f42c-4004-49d9-9456-4d4df074004f-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.219759 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ef1f42c-4004-49d9-9456-4d4df074004f-kube-api-access-zwsbx" (OuterVolumeSpecName: "kube-api-access-zwsbx") pod "8ef1f42c-4004-49d9-9456-4d4df074004f" (UID: "8ef1f42c-4004-49d9-9456-4d4df074004f"). InnerVolumeSpecName "kube-api-access-zwsbx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.227733 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b535626-d96c-4843-bc25-c4fafa967b23-kube-api-access-5pgq5" (OuterVolumeSpecName: "kube-api-access-5pgq5") pod "3b535626-d96c-4843-bc25-c4fafa967b23" (UID: "3b535626-d96c-4843-bc25-c4fafa967b23"). InnerVolumeSpecName "kube-api-access-5pgq5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.227798 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3b535626-d96c-4843-bc25-c4fafa967b23" (UID: "3b535626-d96c-4843-bc25-c4fafa967b23"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.234146 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.239884 4742 scope.go:117] "RemoveContainer" containerID="75b9102a45bd7348ee8155d1c3b59cc40301c88e77fb8f3ce1855a075666bb90"
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.242023 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 05 06:15:21 crc kubenswrapper[4742]: I1205 06:15:21.247839 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ef1f42c-4004-49d9-9456-4d4df074004f-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "8ef1f42c-4004-49d9-9456-4d4df074004f" (UID: "8ef1f42c-4004-49d9-9456-4d4df074004f"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:21 crc kubenswrapper[4742]: E1205 06:15:21.300074 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee is running failed: container process not found" containerID="19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 05 06:15:21 crc kubenswrapper[4742]: E1205 06:15:21.300291 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee is running failed: container process not found" containerID="19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 05 06:15:21 crc kubenswrapper[4742]: E1205 06:15:21.300453 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee is running failed: container process not found" containerID="19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 05 06:15:21 crc kubenswrapper[4742]: E1205 06:15:21.300475 4742 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="931816fd-7570-46ac-b555-368b196b030c" containerName="nova-scheduler-scheduler"
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.306600 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.312686 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pgq5\" (UniqueName: \"kubernetes.io/projected/3b535626-d96c-4843-bc25-c4fafa967b23-kube-api-access-5pgq5\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.312762 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwsbx\" (UniqueName: \"kubernetes.io/projected/8ef1f42c-4004-49d9-9456-4d4df074004f-kube-api-access-zwsbx\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.312800 4742 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.312814 4742 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8ef1f42c-4004-49d9-9456-4d4df074004f-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.314547 4742 scope.go:117] "RemoveContainer" containerID="3aa39b731afc81b18e6dc9bcdfc9e62825bea59d828add91802d92234f28b7ac"
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.353172 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.360292 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.401111 4742 scope.go:117] "RemoveContainer" containerID="ee00b1fde374119673d9ac5f72a08628b75db2b614f020d61712b793da9557eb"
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.415391 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78fw7\" (UniqueName: \"kubernetes.io/projected/e07b564a-eb31-4f88-ae69-44cceef519a4-kube-api-access-78fw7\") pod \"e07b564a-eb31-4f88-ae69-44cceef519a4\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.415434 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-config-data-default\") pod \"85632fad-1ab6-495e-9049-6b5dad9cc955\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.415490 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/85632fad-1ab6-495e-9049-6b5dad9cc955-galera-tls-certs\") pod \"85632fad-1ab6-495e-9049-6b5dad9cc955\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.415535 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/931816fd-7570-46ac-b555-368b196b030c-config-data\") pod \"931816fd-7570-46ac-b555-368b196b030c\" (UID: \"931816fd-7570-46ac-b555-368b196b030c\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.415561 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-config-data\") pod \"e07b564a-eb31-4f88-ae69-44cceef519a4\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.415681 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-kolla-config\") pod \"85632fad-1ab6-495e-9049-6b5dad9cc955\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.415724 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85632fad-1ab6-495e-9049-6b5dad9cc955-combined-ca-bundle\") pod \"85632fad-1ab6-495e-9049-6b5dad9cc955\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.415763 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5f6j\" (UniqueName: \"kubernetes.io/projected/85632fad-1ab6-495e-9049-6b5dad9cc955-kube-api-access-s5f6j\") pod \"85632fad-1ab6-495e-9049-6b5dad9cc955\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.415787 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-combined-ca-bundle\") pod \"e07b564a-eb31-4f88-ae69-44cceef519a4\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.415816 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j56l5\" (UniqueName: \"kubernetes.io/projected/931816fd-7570-46ac-b555-368b196b030c-kube-api-access-j56l5\") pod \"931816fd-7570-46ac-b555-368b196b030c\" (UID: \"931816fd-7570-46ac-b555-368b196b030c\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.415860 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-operator-scripts\") pod \"85632fad-1ab6-495e-9049-6b5dad9cc955\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.416309 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "85632fad-1ab6-495e-9049-6b5dad9cc955" (UID: "85632fad-1ab6-495e-9049-6b5dad9cc955"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.416788 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-nova-novncproxy-tls-certs\") pod \"e07b564a-eb31-4f88-ae69-44cceef519a4\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.416857 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/931816fd-7570-46ac-b555-368b196b030c-combined-ca-bundle\") pod \"931816fd-7570-46ac-b555-368b196b030c\" (UID: \"931816fd-7570-46ac-b555-368b196b030c\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.416892 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-vencrypt-tls-certs\") pod \"e07b564a-eb31-4f88-ae69-44cceef519a4\" (UID: \"e07b564a-eb31-4f88-ae69-44cceef519a4\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.416955 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/85632fad-1ab6-495e-9049-6b5dad9cc955-config-data-generated\") pod \"85632fad-1ab6-495e-9049-6b5dad9cc955\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.417047 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"85632fad-1ab6-495e-9049-6b5dad9cc955\" (UID: \"85632fad-1ab6-495e-9049-6b5dad9cc955\") "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.417725 4742 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-config-data-default\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.418809 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "85632fad-1ab6-495e-9049-6b5dad9cc955" (UID: "85632fad-1ab6-495e-9049-6b5dad9cc955"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.420774 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85632fad-1ab6-495e-9049-6b5dad9cc955-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "85632fad-1ab6-495e-9049-6b5dad9cc955" (UID: "85632fad-1ab6-495e-9049-6b5dad9cc955"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.420858 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "85632fad-1ab6-495e-9049-6b5dad9cc955" (UID: "85632fad-1ab6-495e-9049-6b5dad9cc955"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.503952 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/931816fd-7570-46ac-b555-368b196b030c-kube-api-access-j56l5" (OuterVolumeSpecName: "kube-api-access-j56l5") pod "931816fd-7570-46ac-b555-368b196b030c" (UID: "931816fd-7570-46ac-b555-368b196b030c"). InnerVolumeSpecName "kube-api-access-j56l5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.509922 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85632fad-1ab6-495e-9049-6b5dad9cc955-kube-api-access-s5f6j" (OuterVolumeSpecName: "kube-api-access-s5f6j") pod "85632fad-1ab6-495e-9049-6b5dad9cc955" (UID: "85632fad-1ab6-495e-9049-6b5dad9cc955"). InnerVolumeSpecName "kube-api-access-s5f6j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.510352 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e07b564a-eb31-4f88-ae69-44cceef519a4-kube-api-access-78fw7" (OuterVolumeSpecName: "kube-api-access-78fw7") pod "e07b564a-eb31-4f88-ae69-44cceef519a4" (UID: "e07b564a-eb31-4f88-ae69-44cceef519a4"). InnerVolumeSpecName "kube-api-access-78fw7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.521413 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5f6j\" (UniqueName: \"kubernetes.io/projected/85632fad-1ab6-495e-9049-6b5dad9cc955-kube-api-access-s5f6j\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.521471 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j56l5\" (UniqueName: \"kubernetes.io/projected/931816fd-7570-46ac-b555-368b196b030c-kube-api-access-j56l5\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.521486 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.521498 4742 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/85632fad-1ab6-495e-9049-6b5dad9cc955-config-data-generated\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.521510 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78fw7\" (UniqueName: \"kubernetes.io/projected/e07b564a-eb31-4f88-ae69-44cceef519a4-kube-api-access-78fw7\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.521522 4742 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85632fad-1ab6-495e-9049-6b5dad9cc955-kolla-config\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.568211 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "mysql-db") pod "85632fad-1ab6-495e-9049-6b5dad9cc955" (UID: "85632fad-1ab6-495e-9049-6b5dad9cc955"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.625375 4742 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" "
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.638845 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"3b535626-d96c-4843-bc25-c4fafa967b23","Type":"ContainerDied","Data":"9deb350faf5db1ec38aca15ec8bff9a43fe2fb9c0b6da1c7036ec9571f4e0bd0"}
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.638887 4742 scope.go:117] "RemoveContainer" containerID="a6798471637e201a3f0d2d87ce22e1f621bb66ed7382d07497c5dc7f71a7d869"
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.638972 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.666230 4742 generic.go:334] "Generic (PLEG): container finished" podID="4f2ab762-07a0-426d-a84a-a53ad7e2fef0" containerID="409654510ed79dcada2dee8cd274d71ff06fbb890d292b51685c3f9fa3bb7761" exitCode=0
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.666296 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron5be2-account-delete-l9sfl" event={"ID":"4f2ab762-07a0-426d-a84a-a53ad7e2fef0","Type":"ContainerDied","Data":"409654510ed79dcada2dee8cd274d71ff06fbb890d292b51685c3f9fa3bb7761"}
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.672404 4742 generic.go:334] "Generic (PLEG): container finished" podID="338b9928-12cd-4db4-806e-4f42612c5ab6" containerID="85c2b0d2bbfb8b6e3c396234c6a2e7332b515e33d6f3309f0bbf9466f03f62a0" exitCode=0
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.672462 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"338b9928-12cd-4db4-806e-4f42612c5ab6","Type":"ContainerDied","Data":"85c2b0d2bbfb8b6e3c396234c6a2e7332b515e33d6f3309f0bbf9466f03f62a0"}
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.672491 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"338b9928-12cd-4db4-806e-4f42612c5ab6","Type":"ContainerDied","Data":"0631c7c2c5eef14ea1a5a5a7e1bfefb3a3e91d5cfd46a553ce1ba9013d0194ae"}
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.672505 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0631c7c2c5eef14ea1a5a5a7e1bfefb3a3e91d5cfd46a553ce1ba9013d0194ae"
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.675307 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5f7476cfc7-5r2mm"
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.675350 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f7476cfc7-5r2mm" event={"ID":"8ef1f42c-4004-49d9-9456-4d4df074004f","Type":"ContainerDied","Data":"9319b3b7ddd9e01160e5f745add76719f3f9fa58204f0f144e712e1cdfea8080"}
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.694678 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glancebc83-account-delete-wqtlb" event={"ID":"c4227032-1b4c-4059-b91f-cf5ece6b20b2","Type":"ContainerStarted","Data":"283b7e47966c3e7e48227899036a6b2462d6b7f1bae3051a2674f4656193492e"}
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.694715 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glancebc83-account-delete-wqtlb" event={"ID":"c4227032-1b4c-4059-b91f-cf5ece6b20b2","Type":"ContainerStarted","Data":"6ec6f8accf2fbb4a89f53a19b1dbf89057fd23c0b3c6a80d6eb3b0f70c54fd70"}
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.705597 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e07b564a-eb31-4f88-ae69-44cceef519a4","Type":"ContainerDied","Data":"ed2e09149f79b267b6ba274d866e9cd4359e49ce566236be2010265253607f75"}
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.705719 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.717583 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement9800-account-delete-mlmb7" event={"ID":"8b956518-9768-477f-9acb-1fc3459427f7","Type":"ContainerStarted","Data":"5a696de0ae41e7833ee83f7613624a0e873d7ff3c9d51fceb0d56ed64b7f8f9d"}
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.717642 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement9800-account-delete-mlmb7" event={"ID":"8b956518-9768-477f-9acb-1fc3459427f7","Type":"ContainerStarted","Data":"929912473a4e6168616da96a203323308ae8dec8da4b56cde10961befddafcc2"}
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.718860 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell12c4b-account-delete-hgg9m" event={"ID":"42c1f939-2d9c-4a8d-a341-cbce22551d58","Type":"ContainerStarted","Data":"3669c98b97b3e1829ea751201c3c1ec4d8ce17c91e62ad74bc0203a441ad7b9e"}
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.718891 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell12c4b-account-delete-hgg9m" event={"ID":"42c1f939-2d9c-4a8d-a341-cbce22551d58","Type":"ContainerStarted","Data":"30d0d66a335bd4adc9b6f3ccaa7d5b82e84e19e5a5613d9ae3958b36bc7a62af"}
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.718967 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/novacell12c4b-account-delete-hgg9m" podUID="42c1f939-2d9c-4a8d-a341-cbce22551d58" containerName="mariadb-account-delete" containerID="cri-o://3669c98b97b3e1829ea751201c3c1ec4d8ce17c91e62ad74bc0203a441ad7b9e" gracePeriod=30
Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.723835 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/931816fd-7570-46ac-b555-368b196b030c-config-data" (OuterVolumeSpecName: "config-data") pod "931816fd-7570-46ac-b555-368b196b030c" (UID: "931816fd-7570-46ac-b555-368b196b030c"). InnerVolumeSpecName "config-data".
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.727247 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/931816fd-7570-46ac-b555-368b196b030c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.739255 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85632fad-1ab6-495e-9049-6b5dad9cc955-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85632fad-1ab6-495e-9049-6b5dad9cc955" (UID: "85632fad-1ab6-495e-9049-6b5dad9cc955"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.756664 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement9800-account-delete-mlmb7" podStartSLOduration=5.756644746 podStartE2EDuration="5.756644746s" podCreationTimestamp="2025-12-05 06:15:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:15:21.741336252 +0000 UTC m=+1397.653471314" watchObservedRunningTime="2025-12-05 06:15:21.756644746 +0000 UTC m=+1397.668779808" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.758306 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbicanf9da-account-delete-4bv5f" event={"ID":"61c4b9e1-5266-49eb-8348-3b1034562185","Type":"ContainerStarted","Data":"3840798ac23a9b0863d3c14f3c0de6637145f225e4b32a1a83018f90c174fe6e"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.758345 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbicanf9da-account-delete-4bv5f" event={"ID":"61c4b9e1-5266-49eb-8348-3b1034562185","Type":"ContainerStarted","Data":"483d1bc42de90c15f97297df684f3dc97dd2bc83aa03249cb365cfe7c27d6ae0"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.761085 4742 generic.go:334] "Generic (PLEG): container finished" podID="85632fad-1ab6-495e-9049-6b5dad9cc955" containerID="43f59b0dd0673acbf1e8b1cb19d732574d31c64af500c88a90c6c1409d92d526" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.761123 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"85632fad-1ab6-495e-9049-6b5dad9cc955","Type":"ContainerDied","Data":"43f59b0dd0673acbf1e8b1cb19d732574d31c64af500c88a90c6c1409d92d526"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.761140 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"85632fad-1ab6-495e-9049-6b5dad9cc955","Type":"ContainerDied","Data":"7e696c06cbb0b2c6f72506516b0181c7f54cff155f629e3b9d17eecfd4d56382"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.761187 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.763146 4742 generic.go:334] "Generic (PLEG): container finished" podID="931816fd-7570-46ac-b555-368b196b030c" containerID="19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.763177 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"931816fd-7570-46ac-b555-368b196b030c","Type":"ContainerDied","Data":"19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.763193 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"931816fd-7570-46ac-b555-368b196b030c","Type":"ContainerDied","Data":"e8e2523c90a163b16a9aee5217ec72bd7e15cbfe79e812a2935d7b6aad45ddfc"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.763226 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.765581 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapie0d5-account-delete-294ls" event={"ID":"1a690523-b1e4-4dd5-b280-58fd8b91b3bf","Type":"ContainerStarted","Data":"694419fe80f2a74b277bf90929588ec4d678e3a527f5229dd86c95a2ec6b9190"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.765704 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novacell12c4b-account-delete-hgg9m" podStartSLOduration=4.765685544 podStartE2EDuration="4.765685544s" podCreationTimestamp="2025-12-05 06:15:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:15:21.756156443 +0000 UTC m=+1397.668291525" watchObservedRunningTime="2025-12-05 06:15:21.765685544 +0000 UTC m=+1397.677820606" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.776842 4742 generic.go:334] "Generic (PLEG): container finished" podID="6a5ca1f6-73b0-43da-82c6-995495666585" containerID="ff2c23489eeb12a736b3cfe4f0639cf009f56c7dbb016087059c4fd472b82f8d" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.776941 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderedde-account-delete-pwsvp" event={"ID":"6a5ca1f6-73b0-43da-82c6-995495666585","Type":"ContainerDied","Data":"ff2c23489eeb12a736b3cfe4f0639cf009f56c7dbb016087059c4fd472b82f8d"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.797582 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell036a3-account-delete-mmf6x" event={"ID":"5b2208e7-3101-4090-9f35-fba640d2f1d9","Type":"ContainerStarted","Data":"5ab507fe1c04a7056d68dca7aa4680091e6ec02733336d33099e102c29640adb"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.797628 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell036a3-account-delete-mmf6x" event={"ID":"5b2208e7-3101-4090-9f35-fba640d2f1d9","Type":"ContainerStarted","Data":"33bbec5e4406ae9f140c27451e1c8802550957395f5f4f57b77c6a8a654b921c"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.802079 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e07b564a-eb31-4f88-ae69-44cceef519a4" (UID: 
"e07b564a-eb31-4f88-ae69-44cceef519a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.808429 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "e07b564a-eb31-4f88-ae69-44cceef519a4" (UID: "e07b564a-eb31-4f88-ae69-44cceef519a4"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.812890 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbicanf9da-account-delete-4bv5f" podStartSLOduration=5.812868179 podStartE2EDuration="5.812868179s" podCreationTimestamp="2025-12-05 06:15:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:15:21.773840889 +0000 UTC m=+1397.685975951" watchObservedRunningTime="2025-12-05 06:15:21.812868179 +0000 UTC m=+1397.725003251" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.818555 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8ef1f42c-4004-49d9-9456-4d4df074004f" (UID: "8ef1f42c-4004-49d9-9456-4d4df074004f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.829454 4742 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.829479 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85632fad-1ab6-495e-9049-6b5dad9cc955-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.829487 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.829495 4742 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.833505 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-config-data" (OuterVolumeSpecName: "config-data") pod "e07b564a-eb31-4f88-ae69-44cceef519a4" (UID: "e07b564a-eb31-4f88-ae69-44cceef519a4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.838519 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novacell036a3-account-delete-mmf6x" podStartSLOduration=4.838497045 podStartE2EDuration="4.838497045s" podCreationTimestamp="2025-12-05 06:15:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 06:15:21.818656362 +0000 UTC m=+1397.730791414" watchObservedRunningTime="2025-12-05 06:15:21.838497045 +0000 UTC m=+1397.750632107" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.838860 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/931816fd-7570-46ac-b555-368b196b030c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "931816fd-7570-46ac-b555-368b196b030c" (UID: "931816fd-7570-46ac-b555-368b196b030c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.847189 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.897849 4742 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.905209 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-config-data" (OuterVolumeSpecName: "config-data") pod "8ef1f42c-4004-49d9-9456-4d4df074004f" (UID: "8ef1f42c-4004-49d9-9456-4d4df074004f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.910432 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8ef1f42c-4004-49d9-9456-4d4df074004f" (UID: "8ef1f42c-4004-49d9-9456-4d4df074004f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.913338 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "e07b564a-eb31-4f88-ae69-44cceef519a4" (UID: "e07b564a-eb31-4f88-ae69-44cceef519a4"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.920782 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85632fad-1ab6-495e-9049-6b5dad9cc955-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "85632fad-1ab6-495e-9049-6b5dad9cc955" (UID: "85632fad-1ab6-495e-9049-6b5dad9cc955"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.923158 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ef1f42c-4004-49d9-9456-4d4df074004f" (UID: "8ef1f42c-4004-49d9-9456-4d4df074004f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.923241 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3b535626-d96c-4843-bc25-c4fafa967b23" (UID: "3b535626-d96c-4843-bc25-c4fafa967b23"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.931388 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.931417 4742 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.931433 4742 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.932230 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/931816fd-7570-46ac-b555-368b196b030c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.932248 4742 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.932260 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef1f42c-4004-49d9-9456-4d4df074004f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.932273 4742 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/85632fad-1ab6-495e-9049-6b5dad9cc955-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.932287 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.932297 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e07b564a-eb31-4f88-ae69-44cceef519a4-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:21.932162 4742 configmap.go:193] Couldn't get configMap openstack/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Dec 05 
06:15:22 crc kubenswrapper[4742]: E1205 06:15:21.932412 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/42c1f939-2d9c-4a8d-a341-cbce22551d58-operator-scripts podName:42c1f939-2d9c-4a8d-a341-cbce22551d58 nodeName:}" failed. No retries permitted until 2025-12-05 06:15:25.932342061 +0000 UTC m=+1401.844477133 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/42c1f939-2d9c-4a8d-a341-cbce22551d58-operator-scripts") pod "novacell12c4b-account-delete-hgg9m" (UID: "42c1f939-2d9c-4a8d-a341-cbce22551d58") : configmap "openstack-cell1-scripts" not found Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:21.932247 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-config-data" (OuterVolumeSpecName: "config-data") pod "3b535626-d96c-4843-bc25-c4fafa967b23" (UID: "3b535626-d96c-4843-bc25-c4fafa967b23"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.003704 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.004008 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="ceilometer-central-agent" containerID="cri-o://68c5adefa894fd4e4d4330c2dd3417e387b076c9891ece3a5627225e0d0c2d97" gracePeriod=30 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.005034 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="proxy-httpd" containerID="cri-o://2ecbc57c9c7699122ac359af92d4cd9ed8008de0c52d7893deb3493891be0b73" gracePeriod=30 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.005209 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="sg-core" containerID="cri-o://a39fff3b53abe8f6e2a0ba533233662c0b7fe156e685ced3f6efd8f2c79f2d9f" gracePeriod=30 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.005175 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="ceilometer-notification-agent" containerID="cri-o://4c2c76854fdd4144c4cb1d5dac7df46a88805bb483cbfd5411dfc86a014d19ea" gracePeriod=30 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.027229 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.027784 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="fb4dce96-8228-455b-9edc-37a62af6e732" containerName="kube-state-metrics" containerID="cri-o://63b79a58b1a6b877064a1e2ed25d6d589d4ca79328f599b17692a53d79df74d6" gracePeriod=30 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.034163 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b535626-d96c-4843-bc25-c4fafa967b23-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.140999 4742 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/cinder-api-0" podUID="e6063f78-1b45-493e-ae25-62239a1ed5e3" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.163:8776/healthcheck\": read tcp 10.217.0.2:60794->10.217.0.163:8776: read: connection reset by peer" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.186166 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="7b5d8165-e06e-4600-9cab-9cf84c010725" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.206087 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.206378 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="ec392288-7e80-4956-836c-d400d4460ebc" containerName="memcached" containerID="cri-o://b91b925cb775f3c104773cf1cf7bcfec00234be6262a07996845dc0aa637e20b" gracePeriod=30 Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.227104 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 85c2b0d2bbfb8b6e3c396234c6a2e7332b515e33d6f3309f0bbf9466f03f62a0 is running failed: container process not found" containerID="85c2b0d2bbfb8b6e3c396234c6a2e7332b515e33d6f3309f0bbf9466f03f62a0" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.228465 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 85c2b0d2bbfb8b6e3c396234c6a2e7332b515e33d6f3309f0bbf9466f03f62a0 is running failed: container process not found" containerID="85c2b0d2bbfb8b6e3c396234c6a2e7332b515e33d6f3309f0bbf9466f03f62a0" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.240190 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 85c2b0d2bbfb8b6e3c396234c6a2e7332b515e33d6f3309f0bbf9466f03f62a0 is running failed: container process not found" containerID="85c2b0d2bbfb8b6e3c396234c6a2e7332b515e33d6f3309f0bbf9466f03f62a0" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.240258 4742 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 85c2b0d2bbfb8b6e3c396234c6a2e7332b515e33d6f3309f0bbf9466f03f62a0 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="338b9928-12cd-4db4-806e-4f42612c5ab6" containerName="nova-cell0-conductor-conductor" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.271442 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-pfkmv"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.301134 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-r2qnr"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.317119 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone7678-account-delete-n6gdz"] Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.319793 4742 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8ef1f42c-4004-49d9-9456-4d4df074004f" containerName="proxy-httpd" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.319810 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef1f42c-4004-49d9-9456-4d4df074004f" containerName="proxy-httpd" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.319828 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85632fad-1ab6-495e-9049-6b5dad9cc955" containerName="mysql-bootstrap" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.319834 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="85632fad-1ab6-495e-9049-6b5dad9cc955" containerName="mysql-bootstrap" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.319846 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85632fad-1ab6-495e-9049-6b5dad9cc955" containerName="galera" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.319851 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="85632fad-1ab6-495e-9049-6b5dad9cc955" containerName="galera" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.319860 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88c6674-8c2f-4868-8839-1ec313fbfe8e" containerName="openstack-network-exporter" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.319866 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88c6674-8c2f-4868-8839-1ec313fbfe8e" containerName="openstack-network-exporter" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.319876 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8485e5ca-5372-441f-9e02-3df086991b2c" containerName="dnsmasq-dns" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.319881 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="8485e5ca-5372-441f-9e02-3df086991b2c" containerName="dnsmasq-dns" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.319893 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef1f42c-4004-49d9-9456-4d4df074004f" containerName="proxy-server" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.319899 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef1f42c-4004-49d9-9456-4d4df074004f" containerName="proxy-server" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.319908 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e07b564a-eb31-4f88-ae69-44cceef519a4" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.319913 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e07b564a-eb31-4f88-ae69-44cceef519a4" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.319923 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8485e5ca-5372-441f-9e02-3df086991b2c" containerName="init" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.319928 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="8485e5ca-5372-441f-9e02-3df086991b2c" containerName="init" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.319938 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="931816fd-7570-46ac-b555-368b196b030c" containerName="nova-scheduler-scheduler" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.319943 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="931816fd-7570-46ac-b555-368b196b030c" containerName="nova-scheduler-scheduler" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.319951 4742 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="3b535626-d96c-4843-bc25-c4fafa967b23" containerName="probe" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.319957 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b535626-d96c-4843-bc25-c4fafa967b23" containerName="probe" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.319968 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8e993d8-0221-4214-b00a-ca745e716bbe" containerName="openstack-network-exporter" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.319973 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8e993d8-0221-4214-b00a-ca745e716bbe" containerName="openstack-network-exporter" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.319985 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5df8784-b63d-41b7-a542-dcf53ea6cc5e" containerName="ovn-controller" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.319992 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5df8784-b63d-41b7-a542-dcf53ea6cc5e" containerName="ovn-controller" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.320003 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8e993d8-0221-4214-b00a-ca745e716bbe" containerName="ovsdbserver-sb" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320008 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8e993d8-0221-4214-b00a-ca745e716bbe" containerName="ovsdbserver-sb" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.320017 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b535626-d96c-4843-bc25-c4fafa967b23" containerName="cinder-scheduler" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320022 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b535626-d96c-4843-bc25-c4fafa967b23" containerName="cinder-scheduler" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.320063 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88c6674-8c2f-4868-8839-1ec313fbfe8e" containerName="ovsdbserver-nb" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320070 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88c6674-8c2f-4868-8839-1ec313fbfe8e" containerName="ovsdbserver-nb" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.320081 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebded868-aaf1-4294-bec1-ec504cdf1810" containerName="openstack-network-exporter" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320086 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebded868-aaf1-4294-bec1-ec504cdf1810" containerName="openstack-network-exporter" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320239 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="8485e5ca-5372-441f-9e02-3df086991b2c" containerName="dnsmasq-dns" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320255 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5df8784-b63d-41b7-a542-dcf53ea6cc5e" containerName="ovn-controller" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320266 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ef1f42c-4004-49d9-9456-4d4df074004f" containerName="proxy-server" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320275 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a88c6674-8c2f-4868-8839-1ec313fbfe8e" containerName="openstack-network-exporter" Dec 05 06:15:22 crc 
kubenswrapper[4742]: I1205 06:15:22.320282 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8e993d8-0221-4214-b00a-ca745e716bbe" containerName="ovsdbserver-sb" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320289 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="931816fd-7570-46ac-b555-368b196b030c" containerName="nova-scheduler-scheduler" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320297 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ef1f42c-4004-49d9-9456-4d4df074004f" containerName="proxy-httpd" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320305 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b535626-d96c-4843-bc25-c4fafa967b23" containerName="probe" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320315 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e07b564a-eb31-4f88-ae69-44cceef519a4" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320330 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8e993d8-0221-4214-b00a-ca745e716bbe" containerName="openstack-network-exporter" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320340 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="85632fad-1ab6-495e-9049-6b5dad9cc955" containerName="galera" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320352 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebded868-aaf1-4294-bec1-ec504cdf1810" containerName="openstack-network-exporter" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320359 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b535626-d96c-4843-bc25-c4fafa967b23" containerName="cinder-scheduler" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320367 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a88c6674-8c2f-4868-8839-1ec313fbfe8e" containerName="ovsdbserver-nb" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.320904 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone7678-account-delete-n6gdz" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.362100 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-pfkmv"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.363605 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/672ee527-1fdd-4abc-b327-6f9eb6b07080-operator-scripts\") pod \"keystone7678-account-delete-n6gdz\" (UID: \"672ee527-1fdd-4abc-b327-6f9eb6b07080\") " pod="openstack/keystone7678-account-delete-n6gdz" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.363677 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4nj9\" (UniqueName: \"kubernetes.io/projected/672ee527-1fdd-4abc-b327-6f9eb6b07080-kube-api-access-r4nj9\") pod \"keystone7678-account-delete-n6gdz\" (UID: \"672ee527-1fdd-4abc-b327-6f9eb6b07080\") " pod="openstack/keystone7678-account-delete-n6gdz" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.377622 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-r2qnr"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.386814 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5b594b6ccb-vbxpj" podUID="e9974486-076d-4493-af32-a08eef334572" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.162:9311/healthcheck\": read tcp 10.217.0.2:58732->10.217.0.162:9311: read: connection reset by peer" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.386985 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5b594b6ccb-vbxpj" podUID="e9974486-076d-4493-af32-a08eef334572" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.162:9311/healthcheck\": read tcp 10.217.0.2:58742->10.217.0.162:9311: read: connection reset by peer" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.466398 4742 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.466480 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/672ee527-1fdd-4abc-b327-6f9eb6b07080-operator-scripts podName:672ee527-1fdd-4abc-b327-6f9eb6b07080 nodeName:}" failed. No retries permitted until 2025-12-05 06:15:22.966459382 +0000 UTC m=+1398.878594434 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/672ee527-1fdd-4abc-b327-6f9eb6b07080-operator-scripts") pod "keystone7678-account-delete-n6gdz" (UID: "672ee527-1fdd-4abc-b327-6f9eb6b07080") : configmap "openstack-scripts" not found Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.466489 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/672ee527-1fdd-4abc-b327-6f9eb6b07080-operator-scripts\") pod \"keystone7678-account-delete-n6gdz\" (UID: \"672ee527-1fdd-4abc-b327-6f9eb6b07080\") " pod="openstack/keystone7678-account-delete-n6gdz" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.466618 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4nj9\" (UniqueName: \"kubernetes.io/projected/672ee527-1fdd-4abc-b327-6f9eb6b07080-kube-api-access-r4nj9\") pod \"keystone7678-account-delete-n6gdz\" (UID: \"672ee527-1fdd-4abc-b327-6f9eb6b07080\") " pod="openstack/keystone7678-account-delete-n6gdz" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.471876 4742 projected.go:194] Error preparing data for projected volume kube-api-access-r4nj9 for pod openstack/keystone7678-account-delete-n6gdz: failed to fetch token: serviceaccounts "galera-openstack" not found Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.471969 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/672ee527-1fdd-4abc-b327-6f9eb6b07080-kube-api-access-r4nj9 podName:672ee527-1fdd-4abc-b327-6f9eb6b07080 nodeName:}" failed. No retries permitted until 2025-12-05 06:15:22.971944686 +0000 UTC m=+1398.884079748 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-r4nj9" (UniqueName: "kubernetes.io/projected/672ee527-1fdd-4abc-b327-6f9eb6b07080-kube-api-access-r4nj9") pod "keystone7678-account-delete-n6gdz" (UID: "672ee527-1fdd-4abc-b327-6f9eb6b07080") : failed to fetch token: serviceaccounts "galera-openstack" not found Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.479437 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e2f2b5a-8ced-49df-ae20-f64d13a9938b" path="/var/lib/kubelet/pods/3e2f2b5a-8ced-49df-ae20-f64d13a9938b/volumes" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.480212 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e75b039-76da-40c8-a486-0f310cafa125" path="/var/lib/kubelet/pods/7e75b039-76da-40c8-a486-0f310cafa125/volumes" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.480891 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8485e5ca-5372-441f-9e02-3df086991b2c" path="/var/lib/kubelet/pods/8485e5ca-5372-441f-9e02-3df086991b2c/volumes" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.482227 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a88c6674-8c2f-4868-8839-1ec313fbfe8e" path="/var/lib/kubelet/pods/a88c6674-8c2f-4868-8839-1ec313fbfe8e/volumes" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.484165 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-655b696477-tbv7n"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.486179 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-655b696477-tbv7n" podUID="8d993905-0c76-454d-8eac-8a93674522db" containerName="keystone-api" 
containerID="cri-o://19b3e95c718c3665ff0670d2d832d7d93791bf4dcc4a5e276fe859096cdb96f7" gracePeriod=30 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.508954 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone7678-account-delete-n6gdz"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.523578 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.549326 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-dzkx8"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.560365 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-e0d5-account-create-update-7vcpx"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.567969 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-dzkx8"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.574919 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-e0d5-account-create-update-7vcpx"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.582936 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapie0d5-account-delete-294ls"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.590127 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-75chc"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.597315 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-75chc"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.611354 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-7678-account-create-update-pv5qw"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.615657 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone7678-account-delete-n6gdz"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.622961 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-7678-account-create-update-pv5qw"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.630885 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-25s6b"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.638393 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-25s6b"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.653908 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-36a3-account-create-update-ltscs"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.660429 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell036a3-account-delete-mmf6x"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.666915 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-36a3-account-create-update-ltscs"] Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.704672 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="c4ba4170-0240-42d9-85f4-cf3587f39f02" containerName="galera" containerID="cri-o://d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838" gracePeriod=30 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.826326 4742 generic.go:334] "Generic (PLEG): container finished" podID="1a690523-b1e4-4dd5-b280-58fd8b91b3bf" containerID="d9012d4468b51b4718118fb879533aaf34f98c7740bbf64d2aadb2a37a988b47" 
exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.826868 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapie0d5-account-delete-294ls" event={"ID":"1a690523-b1e4-4dd5-b280-58fd8b91b3bf","Type":"ContainerDied","Data":"d9012d4468b51b4718118fb879533aaf34f98c7740bbf64d2aadb2a37a988b47"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.835542 4742 generic.go:334] "Generic (PLEG): container finished" podID="e3428207-2cb4-47d8-b4d8-941c3a4928fb" containerID="b8f02737722d7ebc14c897ea39f901ead1646c8d5e8658a44265bfe41044eed8" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.835628 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e3428207-2cb4-47d8-b4d8-941c3a4928fb","Type":"ContainerDied","Data":"b8f02737722d7ebc14c897ea39f901ead1646c8d5e8658a44265bfe41044eed8"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.850494 4742 generic.go:334] "Generic (PLEG): container finished" podID="fb4dce96-8228-455b-9edc-37a62af6e732" containerID="63b79a58b1a6b877064a1e2ed25d6d589d4ca79328f599b17692a53d79df74d6" exitCode=2 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.850602 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fb4dce96-8228-455b-9edc-37a62af6e732","Type":"ContainerDied","Data":"63b79a58b1a6b877064a1e2ed25d6d589d4ca79328f599b17692a53d79df74d6"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.866717 4742 generic.go:334] "Generic (PLEG): container finished" podID="d7a764d5-447f-483d-b819-0e398e749600" containerID="a1b30e5b41ae0a67e19767b1176483a9b711ab959c0c1007661ee4670c30e081" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.866793 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d7a764d5-447f-483d-b819-0e398e749600","Type":"ContainerDied","Data":"a1b30e5b41ae0a67e19767b1176483a9b711ab959c0c1007661ee4670c30e081"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.870892 4742 generic.go:334] "Generic (PLEG): container finished" podID="61c4b9e1-5266-49eb-8348-3b1034562185" containerID="3840798ac23a9b0863d3c14f3c0de6637145f225e4b32a1a83018f90c174fe6e" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.870959 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbicanf9da-account-delete-4bv5f" event={"ID":"61c4b9e1-5266-49eb-8348-3b1034562185","Type":"ContainerDied","Data":"3840798ac23a9b0863d3c14f3c0de6637145f225e4b32a1a83018f90c174fe6e"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.874573 4742 generic.go:334] "Generic (PLEG): container finished" podID="e42757b3-029e-4fe9-917f-73331394524e" containerID="87ced7f756fbe6fb669f5837d287cbfc896ceb71b65ba49a0991a9a56aa7f8a6" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.874617 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-765b847d64-jgxg4" event={"ID":"e42757b3-029e-4fe9-917f-73331394524e","Type":"ContainerDied","Data":"87ced7f756fbe6fb669f5837d287cbfc896ceb71b65ba49a0991a9a56aa7f8a6"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.876436 4742 generic.go:334] "Generic (PLEG): container finished" podID="e6063f78-1b45-493e-ae25-62239a1ed5e3" containerID="9ddfbe5ffee29c713d306ff006d773b5b100e240b7d408ad28e4d4bab8088896" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.876479 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-api-0" event={"ID":"e6063f78-1b45-493e-ae25-62239a1ed5e3","Type":"ContainerDied","Data":"9ddfbe5ffee29c713d306ff006d773b5b100e240b7d408ad28e4d4bab8088896"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.877785 4742 generic.go:334] "Generic (PLEG): container finished" podID="8b956518-9768-477f-9acb-1fc3459427f7" containerID="5a696de0ae41e7833ee83f7613624a0e873d7ff3c9d51fceb0d56ed64b7f8f9d" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.877831 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement9800-account-delete-mlmb7" event={"ID":"8b956518-9768-477f-9acb-1fc3459427f7","Type":"ContainerDied","Data":"5a696de0ae41e7833ee83f7613624a0e873d7ff3c9d51fceb0d56ed64b7f8f9d"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.879160 4742 generic.go:334] "Generic (PLEG): container finished" podID="42c1f939-2d9c-4a8d-a341-cbce22551d58" containerID="3669c98b97b3e1829ea751201c3c1ec4d8ce17c91e62ad74bc0203a441ad7b9e" exitCode=1 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.879217 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell12c4b-account-delete-hgg9m" event={"ID":"42c1f939-2d9c-4a8d-a341-cbce22551d58","Type":"ContainerDied","Data":"3669c98b97b3e1829ea751201c3c1ec4d8ce17c91e62ad74bc0203a441ad7b9e"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.880165 4742 generic.go:334] "Generic (PLEG): container finished" podID="5b2208e7-3101-4090-9f35-fba640d2f1d9" containerID="5ab507fe1c04a7056d68dca7aa4680091e6ec02733336d33099e102c29640adb" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.880214 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell036a3-account-delete-mmf6x" event={"ID":"5b2208e7-3101-4090-9f35-fba640d2f1d9","Type":"ContainerDied","Data":"5ab507fe1c04a7056d68dca7aa4680091e6ec02733336d33099e102c29640adb"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.884387 4742 generic.go:334] "Generic (PLEG): container finished" podID="c4227032-1b4c-4059-b91f-cf5ece6b20b2" containerID="283b7e47966c3e7e48227899036a6b2462d6b7f1bae3051a2674f4656193492e" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.884453 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glancebc83-account-delete-wqtlb" event={"ID":"c4227032-1b4c-4059-b91f-cf5ece6b20b2","Type":"ContainerDied","Data":"283b7e47966c3e7e48227899036a6b2462d6b7f1bae3051a2674f4656193492e"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.902090 4742 generic.go:334] "Generic (PLEG): container finished" podID="aa702931-d853-4f8b-b0d8-58f5476bb7c2" containerID="1f4b1e5b484c4b109f9165cf542665fc7a93e90318c7b4dd1ddb7da94d8a3032" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.902238 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aa702931-d853-4f8b-b0d8-58f5476bb7c2","Type":"ContainerDied","Data":"1f4b1e5b484c4b109f9165cf542665fc7a93e90318c7b4dd1ddb7da94d8a3032"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.927584 4742 generic.go:334] "Generic (PLEG): container finished" podID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerID="2ecbc57c9c7699122ac359af92d4cd9ed8008de0c52d7893deb3493891be0b73" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.927623 4742 generic.go:334] "Generic (PLEG): container finished" podID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerID="a39fff3b53abe8f6e2a0ba533233662c0b7fe156e685ced3f6efd8f2c79f2d9f" exitCode=2 Dec 05 06:15:22 crc 
kubenswrapper[4742]: I1205 06:15:22.927639 4742 generic.go:334] "Generic (PLEG): container finished" podID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerID="68c5adefa894fd4e4d4330c2dd3417e387b076c9891ece3a5627225e0d0c2d97" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.927665 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e","Type":"ContainerDied","Data":"2ecbc57c9c7699122ac359af92d4cd9ed8008de0c52d7893deb3493891be0b73"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.927714 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e","Type":"ContainerDied","Data":"a39fff3b53abe8f6e2a0ba533233662c0b7fe156e685ced3f6efd8f2c79f2d9f"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.927729 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e","Type":"ContainerDied","Data":"68c5adefa894fd4e4d4330c2dd3417e387b076c9891ece3a5627225e0d0c2d97"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.930110 4742 generic.go:334] "Generic (PLEG): container finished" podID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" containerID="4d42993853ddd3815008a6e598dffed4d9fa4416bef732ddaba8c8e33025a533" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.930168 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7038cd99-8151-4157-93c6-3b7f5b9ce25e","Type":"ContainerDied","Data":"4d42993853ddd3815008a6e598dffed4d9fa4416bef732ddaba8c8e33025a533"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.934483 4742 generic.go:334] "Generic (PLEG): container finished" podID="e9974486-076d-4493-af32-a08eef334572" containerID="1d507c229540319f85af5a5bb49cd7bea47d3c4c4e80bec322f63230f391811c" exitCode=0 Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.934637 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b594b6ccb-vbxpj" event={"ID":"e9974486-076d-4493-af32-a08eef334572","Type":"ContainerDied","Data":"1d507c229540319f85af5a5bb49cd7bea47d3c4c4e80bec322f63230f391811c"} Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.995074 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/672ee527-1fdd-4abc-b327-6f9eb6b07080-operator-scripts\") pod \"keystone7678-account-delete-n6gdz\" (UID: \"672ee527-1fdd-4abc-b327-6f9eb6b07080\") " pod="openstack/keystone7678-account-delete-n6gdz" Dec 05 06:15:22 crc kubenswrapper[4742]: I1205 06:15:22.995143 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4nj9\" (UniqueName: \"kubernetes.io/projected/672ee527-1fdd-4abc-b327-6f9eb6b07080-kube-api-access-r4nj9\") pod \"keystone7678-account-delete-n6gdz\" (UID: \"672ee527-1fdd-4abc-b327-6f9eb6b07080\") " pod="openstack/keystone7678-account-delete-n6gdz" Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.995539 4742 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.995581 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/672ee527-1fdd-4abc-b327-6f9eb6b07080-operator-scripts podName:672ee527-1fdd-4abc-b327-6f9eb6b07080 nodeName:}" failed. 
No retries permitted until 2025-12-05 06:15:23.99556909 +0000 UTC m=+1399.907704152 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/672ee527-1fdd-4abc-b327-6f9eb6b07080-operator-scripts") pod "keystone7678-account-delete-n6gdz" (UID: "672ee527-1fdd-4abc-b327-6f9eb6b07080") : configmap "openstack-scripts" not found Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.998129 4742 projected.go:194] Error preparing data for projected volume kube-api-access-r4nj9 for pod openstack/keystone7678-account-delete-n6gdz: failed to fetch token: serviceaccounts "galera-openstack" not found Dec 05 06:15:22 crc kubenswrapper[4742]: E1205 06:15:22.998190 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/672ee527-1fdd-4abc-b327-6f9eb6b07080-kube-api-access-r4nj9 podName:672ee527-1fdd-4abc-b327-6f9eb6b07080 nodeName:}" failed. No retries permitted until 2025-12-05 06:15:23.998173379 +0000 UTC m=+1399.910308441 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-r4nj9" (UniqueName: "kubernetes.io/projected/672ee527-1fdd-4abc-b327-6f9eb6b07080-kube-api-access-r4nj9") pod "keystone7678-account-delete-n6gdz" (UID: "672ee527-1fdd-4abc-b327-6f9eb6b07080") : failed to fetch token: serviceaccounts "galera-openstack" not found Dec 05 06:15:23 crc kubenswrapper[4742]: E1205 06:15:23.309390 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="caa9cfd6937fda940888bb64cbccaa6adf27580ea4e177e3d3adf4b5e4e8b93d" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 05 06:15:23 crc kubenswrapper[4742]: E1205 06:15:23.310978 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="caa9cfd6937fda940888bb64cbccaa6adf27580ea4e177e3d3adf4b5e4e8b93d" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 05 06:15:23 crc kubenswrapper[4742]: E1205 06:15:23.312233 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="caa9cfd6937fda940888bb64cbccaa6adf27580ea4e177e3d3adf4b5e4e8b93d" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Dec 05 06:15:23 crc kubenswrapper[4742]: E1205 06:15:23.312311 4742 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="e7d76df0-4f21-4729-9729-1f2ff54a8332" containerName="ovn-northd" Dec 05 06:15:23 crc kubenswrapper[4742]: E1205 06:15:23.414871 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 05 06:15:23 crc kubenswrapper[4742]: E1205 06:15:23.417089 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container 
is stopping, stdout: , stderr: , exit code -1" containerID="d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 05 06:15:23 crc kubenswrapper[4742]: E1205 06:15:23.421335 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Dec 05 06:15:23 crc kubenswrapper[4742]: E1205 06:15:23.421409 4742 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="c4ba4170-0240-42d9-85f4-cf3587f39f02" containerName="galera" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.460313 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.473137 4742 scope.go:117] "RemoveContainer" containerID="6ee0e1f6ed8fc4033483315f49001dac70cdc9d56d231f0ed6e4bf14ed5391bf" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.516425 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lf5d9\" (UniqueName: \"kubernetes.io/projected/338b9928-12cd-4db4-806e-4f42612c5ab6-kube-api-access-lf5d9\") pod \"338b9928-12cd-4db4-806e-4f42612c5ab6\" (UID: \"338b9928-12cd-4db4-806e-4f42612c5ab6\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.516924 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/338b9928-12cd-4db4-806e-4f42612c5ab6-combined-ca-bundle\") pod \"338b9928-12cd-4db4-806e-4f42612c5ab6\" (UID: \"338b9928-12cd-4db4-806e-4f42612c5ab6\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.517266 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/338b9928-12cd-4db4-806e-4f42612c5ab6-config-data\") pod \"338b9928-12cd-4db4-806e-4f42612c5ab6\" (UID: \"338b9928-12cd-4db4-806e-4f42612c5ab6\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.533631 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/338b9928-12cd-4db4-806e-4f42612c5ab6-kube-api-access-lf5d9" (OuterVolumeSpecName: "kube-api-access-lf5d9") pod "338b9928-12cd-4db4-806e-4f42612c5ab6" (UID: "338b9928-12cd-4db4-806e-4f42612c5ab6"). InnerVolumeSpecName "kube-api-access-lf5d9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.585043 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/338b9928-12cd-4db4-806e-4f42612c5ab6-config-data" (OuterVolumeSpecName: "config-data") pod "338b9928-12cd-4db4-806e-4f42612c5ab6" (UID: "338b9928-12cd-4db4-806e-4f42612c5ab6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.588253 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/338b9928-12cd-4db4-806e-4f42612c5ab6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "338b9928-12cd-4db4-806e-4f42612c5ab6" (UID: "338b9928-12cd-4db4-806e-4f42612c5ab6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.620246 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lf5d9\" (UniqueName: \"kubernetes.io/projected/338b9928-12cd-4db4-806e-4f42612c5ab6-kube-api-access-lf5d9\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.620284 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/338b9928-12cd-4db4-806e-4f42612c5ab6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.620296 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/338b9928-12cd-4db4-806e-4f42612c5ab6-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.647204 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.661502 4742 scope.go:117] "RemoveContainer" containerID="26e1bb24efd752b2c3019b0bfc4555cbdbfc083437d9055e783ed7089c77d920" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.664651 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell12c4b-account-delete-hgg9m" Dec 05 06:15:23 crc kubenswrapper[4742]: E1205 06:15:23.667783 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-r4nj9 operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/keystone7678-account-delete-n6gdz" podUID="672ee527-1fdd-4abc-b327-6f9eb6b07080" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.671317 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.692763 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.722709 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3428207-2cb4-47d8-b4d8-941c3a4928fb-logs\") pod \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.722776 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-config-data\") pod \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.722835 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58xt4\" (UniqueName: \"kubernetes.io/projected/42c1f939-2d9c-4a8d-a341-cbce22551d58-kube-api-access-58xt4\") pod \"42c1f939-2d9c-4a8d-a341-cbce22551d58\" (UID: \"42c1f939-2d9c-4a8d-a341-cbce22551d58\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.722888 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wlzj\" (UniqueName: \"kubernetes.io/projected/e3428207-2cb4-47d8-b4d8-941c3a4928fb-kube-api-access-4wlzj\") pod \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.722933 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e3428207-2cb4-47d8-b4d8-941c3a4928fb-httpd-run\") pod \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.722962 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-combined-ca-bundle\") pod \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.723177 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42c1f939-2d9c-4a8d-a341-cbce22551d58-operator-scripts\") pod \"42c1f939-2d9c-4a8d-a341-cbce22551d58\" (UID: \"42c1f939-2d9c-4a8d-a341-cbce22551d58\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.723215 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-internal-tls-certs\") pod \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\" (UID: 
\"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.723244 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-scripts\") pod \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.723271 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\" (UID: \"e3428207-2cb4-47d8-b4d8-941c3a4928fb\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.727866 4742 scope.go:117] "RemoveContainer" containerID="5b04102935046122a7f13426fa065fb74c80fe74f085c0c16a9aab6c2234ef7c" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.728186 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.728804 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3428207-2cb4-47d8-b4d8-941c3a4928fb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e3428207-2cb4-47d8-b4d8-941c3a4928fb" (UID: "e3428207-2cb4-47d8-b4d8-941c3a4928fb"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.729167 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3428207-2cb4-47d8-b4d8-941c3a4928fb-logs" (OuterVolumeSpecName: "logs") pod "e3428207-2cb4-47d8-b4d8-941c3a4928fb" (UID: "e3428207-2cb4-47d8-b4d8-941c3a4928fb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.730461 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42c1f939-2d9c-4a8d-a341-cbce22551d58-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "42c1f939-2d9c-4a8d-a341-cbce22551d58" (UID: "42c1f939-2d9c-4a8d-a341-cbce22551d58"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.731068 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.738387 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.738441 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.739627 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-scripts" (OuterVolumeSpecName: "scripts") pod "e3428207-2cb4-47d8-b4d8-941c3a4928fb" (UID: "e3428207-2cb4-47d8-b4d8-941c3a4928fb"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.740523 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42c1f939-2d9c-4a8d-a341-cbce22551d58-kube-api-access-58xt4" (OuterVolumeSpecName: "kube-api-access-58xt4") pod "42c1f939-2d9c-4a8d-a341-cbce22551d58" (UID: "42c1f939-2d9c-4a8d-a341-cbce22551d58"). InnerVolumeSpecName "kube-api-access-58xt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.741591 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.741693 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3428207-2cb4-47d8-b4d8-941c3a4928fb-kube-api-access-4wlzj" (OuterVolumeSpecName: "kube-api-access-4wlzj") pod "e3428207-2cb4-47d8-b4d8-941c3a4928fb" (UID: "e3428207-2cb4-47d8-b4d8-941c3a4928fb"). InnerVolumeSpecName "kube-api-access-4wlzj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.742164 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "e3428207-2cb4-47d8-b4d8-941c3a4928fb" (UID: "e3428207-2cb4-47d8-b4d8-941c3a4928fb"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.763827 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.773107 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e3428207-2cb4-47d8-b4d8-941c3a4928fb" (UID: "e3428207-2cb4-47d8-b4d8-941c3a4928fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.809371 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-config-data" (OuterVolumeSpecName: "config-data") pod "e3428207-2cb4-47d8-b4d8-941c3a4928fb" (UID: "e3428207-2cb4-47d8-b4d8-941c3a4928fb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.819444 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinderedde-account-delete-pwsvp" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.821563 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.821929 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.822828 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830023 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-config-data\") pod \"e6063f78-1b45-493e-ae25-62239a1ed5e3\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830127 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-config-data-custom\") pod \"e6063f78-1b45-493e-ae25-62239a1ed5e3\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830187 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-public-tls-certs\") pod \"e6063f78-1b45-493e-ae25-62239a1ed5e3\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830226 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-public-tls-certs\") pod \"e42757b3-029e-4fe9-917f-73331394524e\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830247 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-config-data\") pod \"d7a764d5-447f-483d-b819-0e398e749600\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830278 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-combined-ca-bundle\") pod \"e6063f78-1b45-493e-ae25-62239a1ed5e3\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830310 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-config-data-custom\") pod \"e9974486-076d-4493-af32-a08eef334572\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830331 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"d7a764d5-447f-483d-b819-0e398e749600\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830352 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-internal-tls-certs\") pod \"e42757b3-029e-4fe9-917f-73331394524e\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830372 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-combined-ca-bundle\") pod \"e9974486-076d-4493-af32-a08eef334572\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830444 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-combined-ca-bundle\") pod \"e42757b3-029e-4fe9-917f-73331394524e\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830476 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e42757b3-029e-4fe9-917f-73331394524e-logs\") pod \"e42757b3-029e-4fe9-917f-73331394524e\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830501 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-config-data\") pod \"e42757b3-029e-4fe9-917f-73331394524e\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830525 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwdc9\" (UniqueName: \"kubernetes.io/projected/e42757b3-029e-4fe9-917f-73331394524e-kube-api-access-cwdc9\") pod \"e42757b3-029e-4fe9-917f-73331394524e\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830559 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-scripts\") pod \"e6063f78-1b45-493e-ae25-62239a1ed5e3\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830590 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-scripts\") pod \"d7a764d5-447f-483d-b819-0e398e749600\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830644 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d7a764d5-447f-483d-b819-0e398e749600-httpd-run\") pod \"d7a764d5-447f-483d-b819-0e398e749600\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830670 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-scripts\") pod \"e42757b3-029e-4fe9-917f-73331394524e\" (UID: \"e42757b3-029e-4fe9-917f-73331394524e\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830688 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-public-tls-certs\") pod \"e9974486-076d-4493-af32-a08eef334572\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830733 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: 
\"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-kube-state-metrics-tls-config\") pod \"fb4dce96-8228-455b-9edc-37a62af6e732\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830764 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9974486-076d-4493-af32-a08eef334572-logs\") pod \"e9974486-076d-4493-af32-a08eef334572\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830809 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-public-tls-certs\") pod \"d7a764d5-447f-483d-b819-0e398e749600\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830833 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v88cc\" (UniqueName: \"kubernetes.io/projected/e9974486-076d-4493-af32-a08eef334572-kube-api-access-v88cc\") pod \"e9974486-076d-4493-af32-a08eef334572\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830850 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hl9lc\" (UniqueName: \"kubernetes.io/projected/e6063f78-1b45-493e-ae25-62239a1ed5e3-kube-api-access-hl9lc\") pod \"e6063f78-1b45-493e-ae25-62239a1ed5e3\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830897 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-combined-ca-bundle\") pod \"d7a764d5-447f-483d-b819-0e398e749600\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.830978 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6063f78-1b45-493e-ae25-62239a1ed5e3-logs\") pod \"e6063f78-1b45-493e-ae25-62239a1ed5e3\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831003 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-internal-tls-certs\") pod \"e6063f78-1b45-493e-ae25-62239a1ed5e3\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831046 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-kube-state-metrics-tls-certs\") pod \"fb4dce96-8228-455b-9edc-37a62af6e732\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831098 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-combined-ca-bundle\") pod \"fb4dce96-8228-455b-9edc-37a62af6e732\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831122 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e6063f78-1b45-493e-ae25-62239a1ed5e3-etc-machine-id\") pod \"e6063f78-1b45-493e-ae25-62239a1ed5e3\" (UID: \"e6063f78-1b45-493e-ae25-62239a1ed5e3\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831180 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-internal-tls-certs\") pod \"e9974486-076d-4493-af32-a08eef334572\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831235 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whz8j\" (UniqueName: \"kubernetes.io/projected/fb4dce96-8228-455b-9edc-37a62af6e732-kube-api-access-whz8j\") pod \"fb4dce96-8228-455b-9edc-37a62af6e732\" (UID: \"fb4dce96-8228-455b-9edc-37a62af6e732\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831268 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-config-data\") pod \"e9974486-076d-4493-af32-a08eef334572\" (UID: \"e9974486-076d-4493-af32-a08eef334572\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831293 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7a764d5-447f-483d-b819-0e398e749600-logs\") pod \"d7a764d5-447f-483d-b819-0e398e749600\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831342 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8fbr\" (UniqueName: \"kubernetes.io/projected/d7a764d5-447f-483d-b819-0e398e749600-kube-api-access-c8fbr\") pod \"d7a764d5-447f-483d-b819-0e398e749600\" (UID: \"d7a764d5-447f-483d-b819-0e398e749600\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831827 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58xt4\" (UniqueName: \"kubernetes.io/projected/42c1f939-2d9c-4a8d-a341-cbce22551d58-kube-api-access-58xt4\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831846 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wlzj\" (UniqueName: \"kubernetes.io/projected/e3428207-2cb4-47d8-b4d8-941c3a4928fb-kube-api-access-4wlzj\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831855 4742 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e3428207-2cb4-47d8-b4d8-941c3a4928fb-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831868 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831877 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42c1f939-2d9c-4a8d-a341-cbce22551d58-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831886 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831905 4742 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831914 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3428207-2cb4-47d8-b4d8-941c3a4928fb-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.831924 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.833948 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glancebc83-account-delete-wqtlb" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.834781 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e6063f78-1b45-493e-ae25-62239a1ed5e3" (UID: "e6063f78-1b45-493e-ae25-62239a1ed5e3"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.840134 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.841471 4742 scope.go:117] "RemoveContainer" containerID="e3d277fe25b44b8eabf49e249cf506a11614da370c828683e2d2b01d444716d4" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.849039 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron5be2-account-delete-l9sfl" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.849286 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6063f78-1b45-493e-ae25-62239a1ed5e3-logs" (OuterVolumeSpecName: "logs") pod "e6063f78-1b45-493e-ae25-62239a1ed5e3" (UID: "e6063f78-1b45-493e-ae25-62239a1ed5e3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.853560 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7a764d5-447f-483d-b819-0e398e749600-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d7a764d5-447f-483d-b819-0e398e749600" (UID: "d7a764d5-447f-483d-b819-0e398e749600"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.856351 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7a764d5-447f-483d-b819-0e398e749600-logs" (OuterVolumeSpecName: "logs") pod "d7a764d5-447f-483d-b819-0e398e749600" (UID: "d7a764d5-447f-483d-b819-0e398e749600"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.856708 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e6063f78-1b45-493e-ae25-62239a1ed5e3-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e6063f78-1b45-493e-ae25-62239a1ed5e3" (UID: "e6063f78-1b45-493e-ae25-62239a1ed5e3"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.858705 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e42757b3-029e-4fe9-917f-73331394524e-logs" (OuterVolumeSpecName: "logs") pod "e42757b3-029e-4fe9-917f-73331394524e" (UID: "e42757b3-029e-4fe9-917f-73331394524e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.860956 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.862277 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9974486-076d-4493-af32-a08eef334572-logs" (OuterVolumeSpecName: "logs") pod "e9974486-076d-4493-af32-a08eef334572" (UID: "e9974486-076d-4493-af32-a08eef334572"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.867143 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e9974486-076d-4493-af32-a08eef334572" (UID: "e9974486-076d-4493-af32-a08eef334572"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.867192 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9974486-076d-4493-af32-a08eef334572-kube-api-access-v88cc" (OuterVolumeSpecName: "kube-api-access-v88cc") pod "e9974486-076d-4493-af32-a08eef334572" (UID: "e9974486-076d-4493-af32-a08eef334572"). InnerVolumeSpecName "kube-api-access-v88cc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.867337 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.871738 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "d7a764d5-447f-483d-b819-0e398e749600" (UID: "d7a764d5-447f-483d-b819-0e398e749600"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.873032 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-scripts" (OuterVolumeSpecName: "scripts") pod "d7a764d5-447f-483d-b819-0e398e749600" (UID: "d7a764d5-447f-483d-b819-0e398e749600"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.873551 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7a764d5-447f-483d-b819-0e398e749600-kube-api-access-c8fbr" (OuterVolumeSpecName: "kube-api-access-c8fbr") pod "d7a764d5-447f-483d-b819-0e398e749600" (UID: "d7a764d5-447f-483d-b819-0e398e749600"). InnerVolumeSpecName "kube-api-access-c8fbr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.873636 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6063f78-1b45-493e-ae25-62239a1ed5e3-kube-api-access-hl9lc" (OuterVolumeSpecName: "kube-api-access-hl9lc") pod "e6063f78-1b45-493e-ae25-62239a1ed5e3" (UID: "e6063f78-1b45-493e-ae25-62239a1ed5e3"). InnerVolumeSpecName "kube-api-access-hl9lc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.873782 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e42757b3-029e-4fe9-917f-73331394524e-kube-api-access-cwdc9" (OuterVolumeSpecName: "kube-api-access-cwdc9") pod "e42757b3-029e-4fe9-917f-73331394524e" (UID: "e42757b3-029e-4fe9-917f-73331394524e"). InnerVolumeSpecName "kube-api-access-cwdc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.874033 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb4dce96-8228-455b-9edc-37a62af6e732-kube-api-access-whz8j" (OuterVolumeSpecName: "kube-api-access-whz8j") pod "fb4dce96-8228-455b-9edc-37a62af6e732" (UID: "fb4dce96-8228-455b-9edc-37a62af6e732"). InnerVolumeSpecName "kube-api-access-whz8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.874509 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-scripts" (OuterVolumeSpecName: "scripts") pod "e6063f78-1b45-493e-ae25-62239a1ed5e3" (UID: "e6063f78-1b45-493e-ae25-62239a1ed5e3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.874970 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-scripts" (OuterVolumeSpecName: "scripts") pod "e42757b3-029e-4fe9-917f-73331394524e" (UID: "e42757b3-029e-4fe9-917f-73331394524e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.876758 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e3428207-2cb4-47d8-b4d8-941c3a4928fb" (UID: "e3428207-2cb4-47d8-b4d8-941c3a4928fb"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.879034 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-5f7476cfc7-5r2mm"] Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.883050 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-5f7476cfc7-5r2mm"] Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.884168 4742 scope.go:117] "RemoveContainer" containerID="43f59b0dd0673acbf1e8b1cb19d732574d31c64af500c88a90c6c1409d92d526" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.911145 4742 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.933285 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a5ca1f6-73b0-43da-82c6-995495666585-operator-scripts\") pod \"6a5ca1f6-73b0-43da-82c6-995495666585\" (UID: \"6a5ca1f6-73b0-43da-82c6-995495666585\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.933603 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-combined-ca-bundle\") pod \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.934647 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a5ca1f6-73b0-43da-82c6-995495666585-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6a5ca1f6-73b0-43da-82c6-995495666585" (UID: "6a5ca1f6-73b0-43da-82c6-995495666585"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.934845 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7038cd99-8151-4157-93c6-3b7f5b9ce25e-logs" (OuterVolumeSpecName: "logs") pod "7038cd99-8151-4157-93c6-3b7f5b9ce25e" (UID: "7038cd99-8151-4157-93c6-3b7f5b9ce25e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.934387 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7038cd99-8151-4157-93c6-3b7f5b9ce25e-logs\") pod \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.935005 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-config-data\") pod \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.935749 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ec392288-7e80-4956-836c-d400d4460ebc-config-data\") pod \"ec392288-7e80-4956-836c-d400d4460ebc\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.935803 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-internal-tls-certs\") pod \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.936800 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7l9tj\" (UniqueName: \"kubernetes.io/projected/c4227032-1b4c-4059-b91f-cf5ece6b20b2-kube-api-access-7l9tj\") pod \"c4227032-1b4c-4059-b91f-cf5ece6b20b2\" (UID: \"c4227032-1b4c-4059-b91f-cf5ece6b20b2\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.936863 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f2ab762-07a0-426d-a84a-a53ad7e2fef0-operator-scripts\") pod \"4f2ab762-07a0-426d-a84a-a53ad7e2fef0\" (UID: \"4f2ab762-07a0-426d-a84a-a53ad7e2fef0\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.936900 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec392288-7e80-4956-836c-d400d4460ebc-combined-ca-bundle\") pod \"ec392288-7e80-4956-836c-d400d4460ebc\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.937338 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa702931-d853-4f8b-b0d8-58f5476bb7c2-logs" (OuterVolumeSpecName: "logs") pod "aa702931-d853-4f8b-b0d8-58f5476bb7c2" (UID: "aa702931-d853-4f8b-b0d8-58f5476bb7c2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.937746 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec392288-7e80-4956-836c-d400d4460ebc-config-data" (OuterVolumeSpecName: "config-data") pod "ec392288-7e80-4956-836c-d400d4460ebc" (UID: "ec392288-7e80-4956-836c-d400d4460ebc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.936944 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa702931-d853-4f8b-b0d8-58f5476bb7c2-logs\") pod \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.937849 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-744sh\" (UniqueName: \"kubernetes.io/projected/aa702931-d853-4f8b-b0d8-58f5476bb7c2-kube-api-access-744sh\") pod \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.938632 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4227032-1b4c-4059-b91f-cf5ece6b20b2-operator-scripts\") pod \"c4227032-1b4c-4059-b91f-cf5ece6b20b2\" (UID: \"c4227032-1b4c-4059-b91f-cf5ece6b20b2\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.938677 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ec392288-7e80-4956-836c-d400d4460ebc-kolla-config\") pod \"ec392288-7e80-4956-836c-d400d4460ebc\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.938703 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-nova-metadata-tls-certs\") pod \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.938770 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-public-tls-certs\") pod \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.938808 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qp5w\" (UniqueName: \"kubernetes.io/projected/4f2ab762-07a0-426d-a84a-a53ad7e2fef0-kube-api-access-8qp5w\") pod \"4f2ab762-07a0-426d-a84a-a53ad7e2fef0\" (UID: \"4f2ab762-07a0-426d-a84a-a53ad7e2fef0\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.938848 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdg88\" (UniqueName: \"kubernetes.io/projected/ec392288-7e80-4956-836c-d400d4460ebc-kube-api-access-xdg88\") pod \"ec392288-7e80-4956-836c-d400d4460ebc\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.938875 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec392288-7e80-4956-836c-d400d4460ebc-memcached-tls-certs\") pod \"ec392288-7e80-4956-836c-d400d4460ebc\" (UID: \"ec392288-7e80-4956-836c-d400d4460ebc\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.938904 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-combined-ca-bundle\") pod 
\"7038cd99-8151-4157-93c6-3b7f5b9ce25e\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.938948 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-config-data\") pod \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\" (UID: \"aa702931-d853-4f8b-b0d8-58f5476bb7c2\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.938993 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pn9h6\" (UniqueName: \"kubernetes.io/projected/7038cd99-8151-4157-93c6-3b7f5b9ce25e-kube-api-access-pn9h6\") pod \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\" (UID: \"7038cd99-8151-4157-93c6-3b7f5b9ce25e\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.939018 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45v2w\" (UniqueName: \"kubernetes.io/projected/6a5ca1f6-73b0-43da-82c6-995495666585-kube-api-access-45v2w\") pod \"6a5ca1f6-73b0-43da-82c6-995495666585\" (UID: \"6a5ca1f6-73b0-43da-82c6-995495666585\") " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.939664 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f2ab762-07a0-426d-a84a-a53ad7e2fef0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4f2ab762-07a0-426d-a84a-a53ad7e2fef0" (UID: "4f2ab762-07a0-426d-a84a-a53ad7e2fef0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.950793 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa702931-d853-4f8b-b0d8-58f5476bb7c2-kube-api-access-744sh" (OuterVolumeSpecName: "kube-api-access-744sh") pod "aa702931-d853-4f8b-b0d8-58f5476bb7c2" (UID: "aa702931-d853-4f8b-b0d8-58f5476bb7c2"). InnerVolumeSpecName "kube-api-access-744sh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951216 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4227032-1b4c-4059-b91f-cf5ece6b20b2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c4227032-1b4c-4059-b91f-cf5ece6b20b2" (UID: "c4227032-1b4c-4059-b91f-cf5ece6b20b2"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951454 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6063f78-1b45-493e-ae25-62239a1ed5e3-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951527 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ec392288-7e80-4956-836c-d400d4460ebc-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951542 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f2ab762-07a0-426d-a84a-a53ad7e2fef0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951555 4742 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e6063f78-1b45-493e-ae25-62239a1ed5e3-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951568 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa702931-d853-4f8b-b0d8-58f5476bb7c2-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951577 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whz8j\" (UniqueName: \"kubernetes.io/projected/fb4dce96-8228-455b-9edc-37a62af6e732-kube-api-access-whz8j\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951587 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7a764d5-447f-483d-b819-0e398e749600-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951597 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8fbr\" (UniqueName: \"kubernetes.io/projected/d7a764d5-447f-483d-b819-0e398e749600-kube-api-access-c8fbr\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951608 4742 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951618 4742 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951679 4742 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951707 4742 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e3428207-2cb4-47d8-b4d8-941c3a4928fb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951720 4742 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951752 4742 
reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e42757b3-029e-4fe9-917f-73331394524e-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951763 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwdc9\" (UniqueName: \"kubernetes.io/projected/e42757b3-029e-4fe9-917f-73331394524e-kube-api-access-cwdc9\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951774 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951783 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951791 4742 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d7a764d5-447f-483d-b819-0e398e749600-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951800 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951830 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6a5ca1f6-73b0-43da-82c6-995495666585-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951840 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9974486-076d-4493-af32-a08eef334572-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951849 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v88cc\" (UniqueName: \"kubernetes.io/projected/e9974486-076d-4493-af32-a08eef334572-kube-api-access-v88cc\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951927 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hl9lc\" (UniqueName: \"kubernetes.io/projected/e6063f78-1b45-493e-ae25-62239a1ed5e3-kube-api-access-hl9lc\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.951946 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7038cd99-8151-4157-93c6-3b7f5b9ce25e-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.952122 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec392288-7e80-4956-836c-d400d4460ebc-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "ec392288-7e80-4956-836c-d400d4460ebc" (UID: "ec392288-7e80-4956-836c-d400d4460ebc"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.952149 4742 scope.go:117] "RemoveContainer" containerID="b277c72b70d74360bc1397123819d6812ef188799046cfdc94d0b900a6266650" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.952190 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a5ca1f6-73b0-43da-82c6-995495666585-kube-api-access-45v2w" (OuterVolumeSpecName: "kube-api-access-45v2w") pod "6a5ca1f6-73b0-43da-82c6-995495666585" (UID: "6a5ca1f6-73b0-43da-82c6-995495666585"). InnerVolumeSpecName "kube-api-access-45v2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.952847 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e6063f78-1b45-493e-ae25-62239a1ed5e3" (UID: "e6063f78-1b45-493e-ae25-62239a1ed5e3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.955472 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4227032-1b4c-4059-b91f-cf5ece6b20b2-kube-api-access-7l9tj" (OuterVolumeSpecName: "kube-api-access-7l9tj") pod "c4227032-1b4c-4059-b91f-cf5ece6b20b2" (UID: "c4227032-1b4c-4059-b91f-cf5ece6b20b2"). InnerVolumeSpecName "kube-api-access-7l9tj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.974927 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f2ab762-07a0-426d-a84a-a53ad7e2fef0-kube-api-access-8qp5w" (OuterVolumeSpecName: "kube-api-access-8qp5w") pod "4f2ab762-07a0-426d-a84a-a53ad7e2fef0" (UID: "4f2ab762-07a0-426d-a84a-a53ad7e2fef0"). InnerVolumeSpecName "kube-api-access-8qp5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.976434 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e3428207-2cb4-47d8-b4d8-941c3a4928fb","Type":"ContainerDied","Data":"89916a0d2970c36517e36127fd05ef598ea49b28c0fc274bbda2e3eb1c99b5cc"} Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.976533 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.987888 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec392288-7e80-4956-836c-d400d4460ebc-kube-api-access-xdg88" (OuterVolumeSpecName: "kube-api-access-xdg88") pod "ec392288-7e80-4956-836c-d400d4460ebc" (UID: "ec392288-7e80-4956-836c-d400d4460ebc"). InnerVolumeSpecName "kube-api-access-xdg88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.987967 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7038cd99-8151-4157-93c6-3b7f5b9ce25e-kube-api-access-pn9h6" (OuterVolumeSpecName: "kube-api-access-pn9h6") pod "7038cd99-8151-4157-93c6-3b7f5b9ce25e" (UID: "7038cd99-8151-4157-93c6-3b7f5b9ce25e"). InnerVolumeSpecName "kube-api-access-pn9h6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.990769 4742 generic.go:334] "Generic (PLEG): container finished" podID="ec392288-7e80-4956-836c-d400d4460ebc" containerID="b91b925cb775f3c104773cf1cf7bcfec00234be6262a07996845dc0aa637e20b" exitCode=0 Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.990829 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"ec392288-7e80-4956-836c-d400d4460ebc","Type":"ContainerDied","Data":"b91b925cb775f3c104773cf1cf7bcfec00234be6262a07996845dc0aa637e20b"} Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.990855 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"ec392288-7e80-4956-836c-d400d4460ebc","Type":"ContainerDied","Data":"0f4a8968ef0375fccb1d5feb3cd34d4bdfb99d2a64520a10ccc06f1e057b030e"} Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.990931 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.996740 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5b594b6ccb-vbxpj" Dec 05 06:15:23 crc kubenswrapper[4742]: I1205 06:15:23.996769 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5b594b6ccb-vbxpj" event={"ID":"e9974486-076d-4493-af32-a08eef334572","Type":"ContainerDied","Data":"46de7901bfdf9ac1953f8ebd96789e43e6d66b197c2a14f635ad4b7a25f3e3e8"} Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:23.999967 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell12c4b-account-delete-hgg9m" event={"ID":"42c1f939-2d9c-4a8d-a341-cbce22551d58","Type":"ContainerDied","Data":"30d0d66a335bd4adc9b6f3ccaa7d5b82e84e19e5a5613d9ae3958b36bc7a62af"} Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.000889 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell12c4b-account-delete-hgg9m" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.016314 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"fb4dce96-8228-455b-9edc-37a62af6e732","Type":"ContainerDied","Data":"885fd16106f99af98697418992ad069dd83cab998ea801688b659e57f0ca4423"} Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.017376 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.031562 4742 scope.go:117] "RemoveContainer" containerID="43f59b0dd0673acbf1e8b1cb19d732574d31c64af500c88a90c6c1409d92d526" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.031617 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron5be2-account-delete-l9sfl" event={"ID":"4f2ab762-07a0-426d-a84a-a53ad7e2fef0","Type":"ContainerDied","Data":"6d886ffde936b3bedb2d5ad14181164ead61f68ee3c2274575a845e381579ea6"} Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.038714 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d886ffde936b3bedb2d5ad14181164ead61f68ee3c2274575a845e381579ea6" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.031720 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron5be2-account-delete-l9sfl" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.039550 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-765b847d64-jgxg4" event={"ID":"e42757b3-029e-4fe9-917f-73331394524e","Type":"ContainerDied","Data":"7bb76af7c587ed7791faf68a07caf2574f40d34888b30121d2ce7c723a1dd3d6"} Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.039644 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-765b847d64-jgxg4" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.048320 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"e6063f78-1b45-493e-ae25-62239a1ed5e3","Type":"ContainerDied","Data":"e90b9897da1b55b6f2e78939d7972079d11bcc6b94d139a1ddde93d459b419a6"} Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.048383 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.050930 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7038cd99-8151-4157-93c6-3b7f5b9ce25e","Type":"ContainerDied","Data":"d7387779bc1bd76b8ee3f45aaae09d36d4c74b32b6e2a8f786877661d91ac905"} Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.051008 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.053577 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/672ee527-1fdd-4abc-b327-6f9eb6b07080-operator-scripts\") pod \"keystone7678-account-delete-n6gdz\" (UID: \"672ee527-1fdd-4abc-b327-6f9eb6b07080\") " pod="openstack/keystone7678-account-delete-n6gdz" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.053633 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4nj9\" (UniqueName: \"kubernetes.io/projected/672ee527-1fdd-4abc-b327-6f9eb6b07080-kube-api-access-r4nj9\") pod \"keystone7678-account-delete-n6gdz\" (UID: \"672ee527-1fdd-4abc-b327-6f9eb6b07080\") " pod="openstack/keystone7678-account-delete-n6gdz" Dec 05 06:15:24 crc kubenswrapper[4742]: E1205 06:15:24.053669 4742 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Dec 05 06:15:24 crc kubenswrapper[4742]: E1205 06:15:24.053714 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/672ee527-1fdd-4abc-b327-6f9eb6b07080-operator-scripts podName:672ee527-1fdd-4abc-b327-6f9eb6b07080 nodeName:}" failed. No retries permitted until 2025-12-05 06:15:26.053698504 +0000 UTC m=+1401.965833566 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/672ee527-1fdd-4abc-b327-6f9eb6b07080-operator-scripts") pod "keystone7678-account-delete-n6gdz" (UID: "672ee527-1fdd-4abc-b327-6f9eb6b07080") : configmap "openstack-scripts" not found Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.053776 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7l9tj\" (UniqueName: \"kubernetes.io/projected/c4227032-1b4c-4059-b91f-cf5ece6b20b2-kube-api-access-7l9tj\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.053795 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-744sh\" (UniqueName: \"kubernetes.io/projected/aa702931-d853-4f8b-b0d8-58f5476bb7c2-kube-api-access-744sh\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.053804 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4227032-1b4c-4059-b91f-cf5ece6b20b2-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.053813 4742 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ec392288-7e80-4956-836c-d400d4460ebc-kolla-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.053825 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qp5w\" (UniqueName: \"kubernetes.io/projected/4f2ab762-07a0-426d-a84a-a53ad7e2fef0-kube-api-access-8qp5w\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.053835 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.053844 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdg88\" (UniqueName: \"kubernetes.io/projected/ec392288-7e80-4956-836c-d400d4460ebc-kube-api-access-xdg88\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.053853 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pn9h6\" (UniqueName: \"kubernetes.io/projected/7038cd99-8151-4157-93c6-3b7f5b9ce25e-kube-api-access-pn9h6\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.053862 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45v2w\" (UniqueName: \"kubernetes.io/projected/6a5ca1f6-73b0-43da-82c6-995495666585-kube-api-access-45v2w\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.054638 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"d7a764d5-447f-483d-b819-0e398e749600","Type":"ContainerDied","Data":"7beb11964c2be510ceb27b8bbc83c23f39a9eb81f6974352e9728807725b3cf7"} Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.054777 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 06:15:24 crc kubenswrapper[4742]: E1205 06:15:24.056424 4742 projected.go:194] Error preparing data for projected volume kube-api-access-r4nj9 for pod openstack/keystone7678-account-delete-n6gdz: failed to fetch token: serviceaccounts "galera-openstack" not found Dec 05 06:15:24 crc kubenswrapper[4742]: E1205 06:15:24.056495 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/672ee527-1fdd-4abc-b327-6f9eb6b07080-kube-api-access-r4nj9 podName:672ee527-1fdd-4abc-b327-6f9eb6b07080 nodeName:}" failed. No retries permitted until 2025-12-05 06:15:26.056482058 +0000 UTC m=+1401.968617120 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-r4nj9" (UniqueName: "kubernetes.io/projected/672ee527-1fdd-4abc-b327-6f9eb6b07080-kube-api-access-r4nj9") pod "keystone7678-account-delete-n6gdz" (UID: "672ee527-1fdd-4abc-b327-6f9eb6b07080") : failed to fetch token: serviceaccounts "galera-openstack" not found Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.059450 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinderedde-account-delete-pwsvp" event={"ID":"6a5ca1f6-73b0-43da-82c6-995495666585","Type":"ContainerDied","Data":"d1d24811eef2a778824a8cac36c1ca4c8307de8f31b1b54e816c83914dee86d8"} Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.059959 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1d24811eef2a778824a8cac36c1ca4c8307de8f31b1b54e816c83914dee86d8" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.060079 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinderedde-account-delete-pwsvp" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.065596 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glancebc83-account-delete-wqtlb" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.066459 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glancebc83-account-delete-wqtlb" event={"ID":"c4227032-1b4c-4059-b91f-cf5ece6b20b2","Type":"ContainerDied","Data":"6ec6f8accf2fbb4a89f53a19b1dbf89057fd23c0b3c6a80d6eb3b0f70c54fd70"} Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.066616 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ec6f8accf2fbb4a89f53a19b1dbf89057fd23c0b3c6a80d6eb3b0f70c54fd70" Dec 05 06:15:24 crc kubenswrapper[4742]: E1205 06:15:24.073825 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43f59b0dd0673acbf1e8b1cb19d732574d31c64af500c88a90c6c1409d92d526\": container with ID starting with 43f59b0dd0673acbf1e8b1cb19d732574d31c64af500c88a90c6c1409d92d526 not found: ID does not exist" containerID="43f59b0dd0673acbf1e8b1cb19d732574d31c64af500c88a90c6c1409d92d526" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.073950 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43f59b0dd0673acbf1e8b1cb19d732574d31c64af500c88a90c6c1409d92d526"} err="failed to get container status \"43f59b0dd0673acbf1e8b1cb19d732574d31c64af500c88a90c6c1409d92d526\": rpc error: code = NotFound desc = could not find container \"43f59b0dd0673acbf1e8b1cb19d732574d31c64af500c88a90c6c1409d92d526\": container with ID starting with 43f59b0dd0673acbf1e8b1cb19d732574d31c64af500c88a90c6c1409d92d526 not found: ID does not exist" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.074025 4742 scope.go:117] "RemoveContainer" containerID="b277c72b70d74360bc1397123819d6812ef188799046cfdc94d0b900a6266650" Dec 05 06:15:24 crc kubenswrapper[4742]: E1205 06:15:24.075856 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b277c72b70d74360bc1397123819d6812ef188799046cfdc94d0b900a6266650\": container with ID starting with b277c72b70d74360bc1397123819d6812ef188799046cfdc94d0b900a6266650 not found: ID does not exist" containerID="b277c72b70d74360bc1397123819d6812ef188799046cfdc94d0b900a6266650" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.078201 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b277c72b70d74360bc1397123819d6812ef188799046cfdc94d0b900a6266650"} err="failed to get container status \"b277c72b70d74360bc1397123819d6812ef188799046cfdc94d0b900a6266650\": rpc error: code = NotFound desc = could not find container \"b277c72b70d74360bc1397123819d6812ef188799046cfdc94d0b900a6266650\": container with ID starting with b277c72b70d74360bc1397123819d6812ef188799046cfdc94d0b900a6266650 not found: ID does not exist" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.078246 4742 scope.go:117] "RemoveContainer" containerID="19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.085650 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.085766 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"aa702931-d853-4f8b-b0d8-58f5476bb7c2","Type":"ContainerDied","Data":"c5adff18ce5b2b510dd2ad7b169557ca839448cbbf73eb1e6c98882439081a32"} Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.088339 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone7678-account-delete-n6gdz" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.089104 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.123707 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7038cd99-8151-4157-93c6-3b7f5b9ce25e" (UID: "7038cd99-8151-4157-93c6-3b7f5b9ce25e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.137981 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.150182 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.157130 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.158754 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell12c4b-account-delete-hgg9m"] Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.161901 4742 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.166251 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e6063f78-1b45-493e-ae25-62239a1ed5e3" (UID: "e6063f78-1b45-493e-ae25-62239a1ed5e3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.174767 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell12c4b-account-delete-hgg9m"] Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.189630 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone7678-account-delete-n6gdz" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.216064 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-config-data" (OuterVolumeSpecName: "config-data") pod "aa702931-d853-4f8b-b0d8-58f5476bb7c2" (UID: "aa702931-d853-4f8b-b0d8-58f5476bb7c2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.240634 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.246999 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.254708 4742 scope.go:117] "RemoveContainer" containerID="19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee" Dec 05 06:15:24 crc kubenswrapper[4742]: E1205 06:15:24.255181 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee\": container with ID starting with 19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee not found: ID does not exist" containerID="19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.255222 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee"} err="failed to get container status \"19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee\": rpc error: code = NotFound desc = could not find container \"19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee\": container with ID starting with 19a84075a4a5b72bcffa6b3cad99511d70c5764348b285fdef52052f3200d9ee not found: ID does not exist" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.255254 4742 scope.go:117] "RemoveContainer" containerID="b8f02737722d7ebc14c897ea39f901ead1646c8d5e8658a44265bfe41044eed8" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.262736 4742 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.262759 4742 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.262769 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: E1205 06:15:24.262872 4742 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Dec 05 06:15:24 crc kubenswrapper[4742]: E1205 06:15:24.262915 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data podName:7b5d8165-e06e-4600-9cab-9cf84c010725 nodeName:}" failed. No retries permitted until 2025-12-05 06:15:32.262901524 +0000 UTC m=+1408.175036586 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data") pod "rabbitmq-cell1-server-0" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725") : configmap "rabbitmq-cell1-config-data" not found Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.268803 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e42757b3-029e-4fe9-917f-73331394524e" (UID: "e42757b3-029e-4fe9-917f-73331394524e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.269952 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e6063f78-1b45-493e-ae25-62239a1ed5e3" (UID: "e6063f78-1b45-493e-ae25-62239a1ed5e3"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.278275 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e9974486-076d-4493-af32-a08eef334572" (UID: "e9974486-076d-4493-af32-a08eef334572"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.282682 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec392288-7e80-4956-836c-d400d4460ebc-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "ec392288-7e80-4956-836c-d400d4460ebc" (UID: "ec392288-7e80-4956-836c-d400d4460ebc"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.288025 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec392288-7e80-4956-836c-d400d4460ebc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec392288-7e80-4956-836c-d400d4460ebc" (UID: "ec392288-7e80-4956-836c-d400d4460ebc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.290869 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb4dce96-8228-455b-9edc-37a62af6e732" (UID: "fb4dce96-8228-455b-9edc-37a62af6e732"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.294889 4742 scope.go:117] "RemoveContainer" containerID="c578f580ac4c94f28399a0f7e39da62ca4fb8496169c7001d2053863082caf1f" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.295863 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-config-data" (OuterVolumeSpecName: "config-data") pod "e42757b3-029e-4fe9-917f-73331394524e" (UID: "e42757b3-029e-4fe9-917f-73331394524e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.296253 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-config-data" (OuterVolumeSpecName: "config-data") pod "e9974486-076d-4493-af32-a08eef334572" (UID: "e9974486-076d-4493-af32-a08eef334572"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.314402 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "fb4dce96-8228-455b-9edc-37a62af6e732" (UID: "fb4dce96-8228-455b-9edc-37a62af6e732"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.338852 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.339678 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "fb4dce96-8228-455b-9edc-37a62af6e732" (UID: "fb4dce96-8228-455b-9edc-37a62af6e732"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.346773 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-config-data" (OuterVolumeSpecName: "config-data") pod "7038cd99-8151-4157-93c6-3b7f5b9ce25e" (UID: "7038cd99-8151-4157-93c6-3b7f5b9ce25e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.346866 4742 scope.go:117] "RemoveContainer" containerID="b91b925cb775f3c104773cf1cf7bcfec00234be6262a07996845dc0aa637e20b" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.348267 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.351934 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e42757b3-029e-4fe9-917f-73331394524e" (UID: "e42757b3-029e-4fe9-917f-73331394524e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.361143 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "aa702931-d853-4f8b-b0d8-58f5476bb7c2" (UID: "aa702931-d853-4f8b-b0d8-58f5476bb7c2"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.361774 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d7a764d5-447f-483d-b819-0e398e749600" (UID: "d7a764d5-447f-483d-b819-0e398e749600"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.362959 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-config-data" (OuterVolumeSpecName: "config-data") pod "e6063f78-1b45-493e-ae25-62239a1ed5e3" (UID: "e6063f78-1b45-493e-ae25-62239a1ed5e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364083 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364100 4742 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364111 4742 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364120 4742 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364130 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364139 4742 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364149 4742 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364160 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb4dce96-8228-455b-9edc-37a62af6e732-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364168 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec392288-7e80-4956-836c-d400d4460ebc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364175 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364183 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6063f78-1b45-493e-ae25-62239a1ed5e3-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364190 4742 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364199 4742 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364207 4742 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ec392288-7e80-4956-836c-d400d4460ebc-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.364216 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.367705 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7a764d5-447f-483d-b819-0e398e749600" (UID: "d7a764d5-447f-483d-b819-0e398e749600"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.375236 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "7038cd99-8151-4157-93c6-3b7f5b9ce25e" (UID: "7038cd99-8151-4157-93c6-3b7f5b9ce25e"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.379225 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa702931-d853-4f8b-b0d8-58f5476bb7c2" (UID: "aa702931-d853-4f8b-b0d8-58f5476bb7c2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.379270 4742 scope.go:117] "RemoveContainer" containerID="b91b925cb775f3c104773cf1cf7bcfec00234be6262a07996845dc0aa637e20b" Dec 05 06:15:24 crc kubenswrapper[4742]: E1205 06:15:24.379699 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b91b925cb775f3c104773cf1cf7bcfec00234be6262a07996845dc0aa637e20b\": container with ID starting with b91b925cb775f3c104773cf1cf7bcfec00234be6262a07996845dc0aa637e20b not found: ID does not exist" containerID="b91b925cb775f3c104773cf1cf7bcfec00234be6262a07996845dc0aa637e20b" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.379734 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b91b925cb775f3c104773cf1cf7bcfec00234be6262a07996845dc0aa637e20b"} err="failed to get container status \"b91b925cb775f3c104773cf1cf7bcfec00234be6262a07996845dc0aa637e20b\": rpc error: code = NotFound desc = could not find container \"b91b925cb775f3c104773cf1cf7bcfec00234be6262a07996845dc0aa637e20b\": container with ID starting with b91b925cb775f3c104773cf1cf7bcfec00234be6262a07996845dc0aa637e20b not found: ID does not exist" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.379761 4742 scope.go:117] "RemoveContainer" containerID="1d507c229540319f85af5a5bb49cd7bea47d3c4c4e80bec322f63230f391811c" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.386128 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e42757b3-029e-4fe9-917f-73331394524e" (UID: "e42757b3-029e-4fe9-917f-73331394524e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.392309 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9974486-076d-4493-af32-a08eef334572" (UID: "e9974486-076d-4493-af32-a08eef334572"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.398303 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "aa702931-d853-4f8b-b0d8-58f5476bb7c2" (UID: "aa702931-d853-4f8b-b0d8-58f5476bb7c2"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.399864 4742 scope.go:117] "RemoveContainer" containerID="c58c31d14e6bc541855c1db0c9f365ab77a7e68becb2933d6d951d6d108a2537" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.401125 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24b6ea24-e4d2-42f2-8a10-720d0a3445e4" path="/var/lib/kubelet/pods/24b6ea24-e4d2-42f2-8a10-720d0a3445e4/volumes" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.401993 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="338b9928-12cd-4db4-806e-4f42612c5ab6" path="/var/lib/kubelet/pods/338b9928-12cd-4db4-806e-4f42612c5ab6/volumes" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.402496 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b535626-d96c-4843-bc25-c4fafa967b23" path="/var/lib/kubelet/pods/3b535626-d96c-4843-bc25-c4fafa967b23/volumes" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.404168 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f" path="/var/lib/kubelet/pods/3e04f8fa-308e-4a6e-ae84-5fc9a149ab1f/volumes" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.404721 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42c1f939-2d9c-4a8d-a341-cbce22551d58" path="/var/lib/kubelet/pods/42c1f939-2d9c-4a8d-a341-cbce22551d58/volumes" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.405704 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4341c972-cfe7-4940-ad91-1f8a4d6138ab" path="/var/lib/kubelet/pods/4341c972-cfe7-4940-ad91-1f8a4d6138ab/volumes" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.406470 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66ddfb11-066b-41f6-8bd0-7248f3cc36ea" path="/var/lib/kubelet/pods/66ddfb11-066b-41f6-8bd0-7248f3cc36ea/volumes" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.409044 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77998b8e-507c-487f-8616-6fe17b8f9d9a" path="/var/lib/kubelet/pods/77998b8e-507c-487f-8616-6fe17b8f9d9a/volumes" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.409949 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85632fad-1ab6-495e-9049-6b5dad9cc955" path="/var/lib/kubelet/pods/85632fad-1ab6-495e-9049-6b5dad9cc955/volumes" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.410562 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ef1f42c-4004-49d9-9456-4d4df074004f" path="/var/lib/kubelet/pods/8ef1f42c-4004-49d9-9456-4d4df074004f/volumes" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.411865 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf9c88e4-e053-4594-bc1f-176035f2bff7" path="/var/lib/kubelet/pods/cf9c88e4-e053-4594-bc1f-176035f2bff7/volumes" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.412556 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e07b564a-eb31-4f88-ae69-44cceef519a4" path="/var/lib/kubelet/pods/e07b564a-eb31-4f88-ae69-44cceef519a4/volumes" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.413709 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3428207-2cb4-47d8-b4d8-941c3a4928fb" path="/var/lib/kubelet/pods/e3428207-2cb4-47d8-b4d8-941c3a4928fb/volumes" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.414002 4742 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e9974486-076d-4493-af32-a08eef334572" (UID: "e9974486-076d-4493-af32-a08eef334572"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.415033 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec392288-7e80-4956-836c-d400d4460ebc" path="/var/lib/kubelet/pods/ec392288-7e80-4956-836c-d400d4460ebc/volumes" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.430910 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-config-data" (OuterVolumeSpecName: "config-data") pod "d7a764d5-447f-483d-b819-0e398e749600" (UID: "d7a764d5-447f-483d-b819-0e398e749600"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.467864 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.467891 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.467901 4742 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa702931-d853-4f8b-b0d8-58f5476bb7c2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.467910 4742 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.467919 4742 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7038cd99-8151-4157-93c6-3b7f5b9ce25e-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.467928 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7a764d5-447f-483d-b819-0e398e749600-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.467937 4742 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e42757b3-029e-4fe9-917f-73331394524e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.467946 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9974486-076d-4493-af32-a08eef334572-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.475896 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbicanf9da-account-delete-4bv5f" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.481335 4742 scope.go:117] "RemoveContainer" containerID="3669c98b97b3e1829ea751201c3c1ec4d8ce17c91e62ad74bc0203a441ad7b9e" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.568734 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61c4b9e1-5266-49eb-8348-3b1034562185-operator-scripts\") pod \"61c4b9e1-5266-49eb-8348-3b1034562185\" (UID: \"61c4b9e1-5266-49eb-8348-3b1034562185\") " Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.568831 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptthf\" (UniqueName: \"kubernetes.io/projected/61c4b9e1-5266-49eb-8348-3b1034562185-kube-api-access-ptthf\") pod \"61c4b9e1-5266-49eb-8348-3b1034562185\" (UID: \"61c4b9e1-5266-49eb-8348-3b1034562185\") " Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.569814 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61c4b9e1-5266-49eb-8348-3b1034562185-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "61c4b9e1-5266-49eb-8348-3b1034562185" (UID: "61c4b9e1-5266-49eb-8348-3b1034562185"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.572136 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61c4b9e1-5266-49eb-8348-3b1034562185-kube-api-access-ptthf" (OuterVolumeSpecName: "kube-api-access-ptthf") pod "61c4b9e1-5266-49eb-8348-3b1034562185" (UID: "61c4b9e1-5266-49eb-8348-3b1034562185"). InnerVolumeSpecName "kube-api-access-ptthf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.678963 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptthf\" (UniqueName: \"kubernetes.io/projected/61c4b9e1-5266-49eb-8348-3b1034562185-kube-api-access-ptthf\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.679001 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61c4b9e1-5266-49eb-8348-3b1034562185-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.742167 4742 scope.go:117] "RemoveContainer" containerID="63b79a58b1a6b877064a1e2ed25d6d589d4ca79328f599b17692a53d79df74d6" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.744523 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement9800-account-delete-mlmb7" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.748337 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapie0d5-account-delete-294ls" Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.787101 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novacell036a3-account-delete-mmf6x"
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.797002 4742 scope.go:117] "RemoveContainer" containerID="87ced7f756fbe6fb669f5837d287cbfc896ceb71b65ba49a0991a9a56aa7f8a6"
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.882750 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9p64\" (UniqueName: \"kubernetes.io/projected/8b956518-9768-477f-9acb-1fc3459427f7-kube-api-access-n9p64\") pod \"8b956518-9768-477f-9acb-1fc3459427f7\" (UID: \"8b956518-9768-477f-9acb-1fc3459427f7\") "
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.882894 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hgkj\" (UniqueName: \"kubernetes.io/projected/5b2208e7-3101-4090-9f35-fba640d2f1d9-kube-api-access-8hgkj\") pod \"5b2208e7-3101-4090-9f35-fba640d2f1d9\" (UID: \"5b2208e7-3101-4090-9f35-fba640d2f1d9\") "
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.882985 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b956518-9768-477f-9acb-1fc3459427f7-operator-scripts\") pod \"8b956518-9768-477f-9acb-1fc3459427f7\" (UID: \"8b956518-9768-477f-9acb-1fc3459427f7\") "
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.883068 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a690523-b1e4-4dd5-b280-58fd8b91b3bf-operator-scripts\") pod \"1a690523-b1e4-4dd5-b280-58fd8b91b3bf\" (UID: \"1a690523-b1e4-4dd5-b280-58fd8b91b3bf\") "
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.883095 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdrp4\" (UniqueName: \"kubernetes.io/projected/1a690523-b1e4-4dd5-b280-58fd8b91b3bf-kube-api-access-rdrp4\") pod \"1a690523-b1e4-4dd5-b280-58fd8b91b3bf\" (UID: \"1a690523-b1e4-4dd5-b280-58fd8b91b3bf\") "
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.883119 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b2208e7-3101-4090-9f35-fba640d2f1d9-operator-scripts\") pod \"5b2208e7-3101-4090-9f35-fba640d2f1d9\" (UID: \"5b2208e7-3101-4090-9f35-fba640d2f1d9\") "
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.884001 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b956518-9768-477f-9acb-1fc3459427f7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8b956518-9768-477f-9acb-1fc3459427f7" (UID: "8b956518-9768-477f-9acb-1fc3459427f7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.884268 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a690523-b1e4-4dd5-b280-58fd8b91b3bf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1a690523-b1e4-4dd5-b280-58fd8b91b3bf" (UID: "1a690523-b1e4-4dd5-b280-58fd8b91b3bf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.884269 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b2208e7-3101-4090-9f35-fba640d2f1d9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5b2208e7-3101-4090-9f35-fba640d2f1d9" (UID: "5b2208e7-3101-4090-9f35-fba640d2f1d9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.902777 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a690523-b1e4-4dd5-b280-58fd8b91b3bf-kube-api-access-rdrp4" (OuterVolumeSpecName: "kube-api-access-rdrp4") pod "1a690523-b1e4-4dd5-b280-58fd8b91b3bf" (UID: "1a690523-b1e4-4dd5-b280-58fd8b91b3bf"). InnerVolumeSpecName "kube-api-access-rdrp4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.903176 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b2208e7-3101-4090-9f35-fba640d2f1d9-kube-api-access-8hgkj" (OuterVolumeSpecName: "kube-api-access-8hgkj") pod "5b2208e7-3101-4090-9f35-fba640d2f1d9" (UID: "5b2208e7-3101-4090-9f35-fba640d2f1d9"). InnerVolumeSpecName "kube-api-access-8hgkj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.903879 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b956518-9768-477f-9acb-1fc3459427f7-kube-api-access-n9p64" (OuterVolumeSpecName: "kube-api-access-n9p64") pod "8b956518-9768-477f-9acb-1fc3459427f7" (UID: "8b956518-9768-477f-9acb-1fc3459427f7"). InnerVolumeSpecName "kube-api-access-n9p64". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.984770 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9p64\" (UniqueName: \"kubernetes.io/projected/8b956518-9768-477f-9acb-1fc3459427f7-kube-api-access-n9p64\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.984795 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hgkj\" (UniqueName: \"kubernetes.io/projected/5b2208e7-3101-4090-9f35-fba640d2f1d9-kube-api-access-8hgkj\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.984804 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b956518-9768-477f-9acb-1fc3459427f7-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.984813 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a690523-b1e4-4dd5-b280-58fd8b91b3bf-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.984822 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdrp4\" (UniqueName: \"kubernetes.io/projected/1a690523-b1e4-4dd5-b280-58fd8b91b3bf-kube-api-access-rdrp4\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:24 crc kubenswrapper[4742]: I1205 06:15:24.984830 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5b2208e7-3101-4090-9f35-fba640d2f1d9-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:24 crc kubenswrapper[4742]: E1205 06:15:24.984886 4742 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Dec 05 06:15:24 crc kubenswrapper[4742]: E1205 06:15:24.984928 4742 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data podName:d6b096f4-483e-48c5-a3e1-a178c0c5ae6e nodeName:}" failed. No retries permitted until 2025-12-05 06:15:32.984914741 +0000 UTC m=+1408.897049803 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data") pod "rabbitmq-server-0" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e") : configmap "rabbitmq-config-data" not found
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.030185 4742 scope.go:117] "RemoveContainer" containerID="23c49e25d43e79c7f8ade74991cb2aa015e0aae68fc08c3f8bd44099cbee5e4d"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.039398 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.049712 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.060879 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.083312 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.096394 4742 scope.go:117] "RemoveContainer" containerID="9ddfbe5ffee29c713d306ff006d773b5b100e240b7d408ad28e4d4bab8088896"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.102256 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.112945 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell036a3-account-delete-mmf6x" event={"ID":"5b2208e7-3101-4090-9f35-fba640d2f1d9","Type":"ContainerDied","Data":"33bbec5e4406ae9f140c27451e1c8802550957395f5f4f57b77c6a8a654b921c"}
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.113110 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell036a3-account-delete-mmf6x"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.117860 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbicanf9da-account-delete-4bv5f" event={"ID":"61c4b9e1-5266-49eb-8348-3b1034562185","Type":"ContainerDied","Data":"483d1bc42de90c15f97297df684f3dc97dd2bc83aa03249cb365cfe7c27d6ae0"}
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.117964 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="483d1bc42de90c15f97297df684f3dc97dd2bc83aa03249cb365cfe7c27d6ae0"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.118089 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbicanf9da-account-delete-4bv5f"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.134307 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapie0d5-account-delete-294ls" event={"ID":"1a690523-b1e4-4dd5-b280-58fd8b91b3bf","Type":"ContainerDied","Data":"694419fe80f2a74b277bf90929588ec4d678e3a527f5229dd86c95a2ec6b9190"}
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.134384 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapie0d5-account-delete-294ls"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.151671 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.158299 4742 scope.go:117] "RemoveContainer" containerID="873f59d3fccd6e3aebb3bd5b7bfff039e46a5ac7aa542e462b2ded2d505ccf92"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.162104 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.170241 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.173253 4742 generic.go:334] "Generic (PLEG): container finished" podID="7b5d8165-e06e-4600-9cab-9cf84c010725" containerID="4c9ed2559817c2da1b28311959a187477072585e1e74ef4ffe26d1ce23f9ee55" exitCode=0
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.173314 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7b5d8165-e06e-4600-9cab-9cf84c010725","Type":"ContainerDied","Data":"4c9ed2559817c2da1b28311959a187477072585e1e74ef4ffe26d1ce23f9ee55"}
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.193577 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.194445 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ba4170-0240-42d9-85f4-cf3587f39f02-galera-tls-certs\") pod \"c4ba4170-0240-42d9-85f4-cf3587f39f02\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.194500 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbqk6\" (UniqueName: \"kubernetes.io/projected/c4ba4170-0240-42d9-85f4-cf3587f39f02-kube-api-access-kbqk6\") pod \"c4ba4170-0240-42d9-85f4-cf3587f39f02\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.194601 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-operator-scripts\") pod \"c4ba4170-0240-42d9-85f4-cf3587f39f02\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.194663 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-kolla-config\") pod \"c4ba4170-0240-42d9-85f4-cf3587f39f02\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.194684 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ba4170-0240-42d9-85f4-cf3587f39f02-combined-ca-bundle\") pod \"c4ba4170-0240-42d9-85f4-cf3587f39f02\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.194703 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c4ba4170-0240-42d9-85f4-cf3587f39f02-config-data-generated\") pod \"c4ba4170-0240-42d9-85f4-cf3587f39f02\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.194719 4742 scope.go:117] "RemoveContainer" containerID="4d42993853ddd3815008a6e598dffed4d9fa4416bef732ddaba8c8e33025a533"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.194732 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"c4ba4170-0240-42d9-85f4-cf3587f39f02\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.194778 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-config-data-default\") pod \"c4ba4170-0240-42d9-85f4-cf3587f39f02\" (UID: \"c4ba4170-0240-42d9-85f4-cf3587f39f02\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.195581 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4ba4170-0240-42d9-85f4-cf3587f39f02-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "c4ba4170-0240-42d9-85f4-cf3587f39f02" (UID: "c4ba4170-0240-42d9-85f4-cf3587f39f02"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.195923 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement9800-account-delete-mlmb7" event={"ID":"8b956518-9768-477f-9acb-1fc3459427f7","Type":"ContainerDied","Data":"929912473a4e6168616da96a203323308ae8dec8da4b56cde10961befddafcc2"}
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.195951 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="929912473a4e6168616da96a203323308ae8dec8da4b56cde10961befddafcc2"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.195980 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement9800-account-delete-mlmb7"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.196158 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "c4ba4170-0240-42d9-85f4-cf3587f39f02" (UID: "c4ba4170-0240-42d9-85f4-cf3587f39f02"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.196403 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "c4ba4170-0240-42d9-85f4-cf3587f39f02" (UID: "c4ba4170-0240-42d9-85f4-cf3587f39f02"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.197735 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c4ba4170-0240-42d9-85f4-cf3587f39f02" (UID: "c4ba4170-0240-42d9-85f4-cf3587f39f02"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.201245 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5b594b6ccb-vbxpj"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.202616 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e7d76df0-4f21-4729-9729-1f2ff54a8332/ovn-northd/0.log"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.202653 4742 generic.go:334] "Generic (PLEG): container finished" podID="e7d76df0-4f21-4729-9729-1f2ff54a8332" containerID="caa9cfd6937fda940888bb64cbccaa6adf27580ea4e177e3d3adf4b5e4e8b93d" exitCode=139
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.202703 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e7d76df0-4f21-4729-9729-1f2ff54a8332","Type":"ContainerDied","Data":"caa9cfd6937fda940888bb64cbccaa6adf27580ea4e177e3d3adf4b5e4e8b93d"}
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.202900 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4ba4170-0240-42d9-85f4-cf3587f39f02-kube-api-access-kbqk6" (OuterVolumeSpecName: "kube-api-access-kbqk6") pod "c4ba4170-0240-42d9-85f4-cf3587f39f02" (UID: "c4ba4170-0240-42d9-85f4-cf3587f39f02"). InnerVolumeSpecName "kube-api-access-kbqk6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.213205 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5b594b6ccb-vbxpj"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.216189 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "mysql-db") pod "c4ba4170-0240-42d9-85f4-cf3587f39f02" (UID: "c4ba4170-0240-42d9-85f4-cf3587f39f02"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.218345 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-765b847d64-jgxg4"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.220742 4742 scope.go:117] "RemoveContainer" containerID="5786613a0ea271c49e36cb812feba2d04a12fd32ef5d2c4e0ebfce2f557616b6"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.225137 4742 generic.go:334] "Generic (PLEG): container finished" podID="c4ba4170-0240-42d9-85f4-cf3587f39f02" containerID="d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838" exitCode=0
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.225167 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.225191 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c4ba4170-0240-42d9-85f4-cf3587f39f02","Type":"ContainerDied","Data":"d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838"}
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.225214 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c4ba4170-0240-42d9-85f4-cf3587f39f02","Type":"ContainerDied","Data":"84b7e22e1ee9f346c9eedcc2067377b3434c6d31dc52ee00581e2a6a059af766"}
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.231832 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone7678-account-delete-n6gdz"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.245345 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-765b847d64-jgxg4"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.253138 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.258327 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e7d76df0-4f21-4729-9729-1f2ff54a8332/ovn-northd/0.log"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.258406 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.269403 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4ba4170-0240-42d9-85f4-cf3587f39f02-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "c4ba4170-0240-42d9-85f4-cf3587f39f02" (UID: "c4ba4170-0240-42d9-85f4-cf3587f39f02"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.271320 4742 scope.go:117] "RemoveContainer" containerID="a1b30e5b41ae0a67e19767b1176483a9b711ab959c0c1007661ee4670c30e081"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.275988 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.297239 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.297267 4742 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-kolla-config\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.297280 4742 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c4ba4170-0240-42d9-85f4-cf3587f39f02-config-data-generated\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.297303 4742 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.297316 4742 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c4ba4170-0240-42d9-85f4-cf3587f39f02-config-data-default\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.297324 4742 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4ba4170-0240-42d9-85f4-cf3587f39f02-galera-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.297332 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbqk6\" (UniqueName: \"kubernetes.io/projected/c4ba4170-0240-42d9-85f4-cf3587f39f02-kube-api-access-kbqk6\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.315570 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4ba4170-0240-42d9-85f4-cf3587f39f02-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c4ba4170-0240-42d9-85f4-cf3587f39f02" (UID: "c4ba4170-0240-42d9-85f4-cf3587f39f02"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.316918 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapie0d5-account-delete-294ls"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.319101 4742 scope.go:117] "RemoveContainer" containerID="f455df6d411179859e60d3c9b127100c03c9bd439f8c01b9bb223b4b2bbfd0d5"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.331375 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z77c8"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.360136 4742 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.368453 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapie0d5-account-delete-294ls"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.375286 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell036a3-account-delete-mmf6x"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.376081 4742 scope.go:117] "RemoveContainer" containerID="1f4b1e5b484c4b109f9165cf542665fc7a93e90318c7b4dd1ddb7da94d8a3032"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.377496 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z77c8"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.380222 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell036a3-account-delete-mmf6x"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.384137 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.397758 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2smw\" (UniqueName: \"kubernetes.io/projected/e7d76df0-4f21-4729-9729-1f2ff54a8332-kube-api-access-q2smw\") pod \"e7d76df0-4f21-4729-9729-1f2ff54a8332\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.397823 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-combined-ca-bundle\") pod \"e7d76df0-4f21-4729-9729-1f2ff54a8332\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.397862 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7d76df0-4f21-4729-9729-1f2ff54a8332-config\") pod \"e7d76df0-4f21-4729-9729-1f2ff54a8332\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.397926 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e7d76df0-4f21-4729-9729-1f2ff54a8332-ovn-rundir\") pod \"e7d76df0-4f21-4729-9729-1f2ff54a8332\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.397998 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-metrics-certs-tls-certs\") pod \"e7d76df0-4f21-4729-9729-1f2ff54a8332\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.398026 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e7d76df0-4f21-4729-9729-1f2ff54a8332-scripts\") pod \"e7d76df0-4f21-4729-9729-1f2ff54a8332\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.398071 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-ovn-northd-tls-certs\") pod \"e7d76df0-4f21-4729-9729-1f2ff54a8332\" (UID: \"e7d76df0-4f21-4729-9729-1f2ff54a8332\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.398364 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4ba4170-0240-42d9-85f4-cf3587f39f02-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.398380 4742 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.399790 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7d76df0-4f21-4729-9729-1f2ff54a8332-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "e7d76df0-4f21-4729-9729-1f2ff54a8332" (UID: "e7d76df0-4f21-4729-9729-1f2ff54a8332"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.400791 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7d76df0-4f21-4729-9729-1f2ff54a8332-scripts" (OuterVolumeSpecName: "scripts") pod "e7d76df0-4f21-4729-9729-1f2ff54a8332" (UID: "e7d76df0-4f21-4729-9729-1f2ff54a8332"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.401255 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7d76df0-4f21-4729-9729-1f2ff54a8332-config" (OuterVolumeSpecName: "config") pod "e7d76df0-4f21-4729-9729-1f2ff54a8332" (UID: "e7d76df0-4f21-4729-9729-1f2ff54a8332"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.403982 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone7678-account-delete-n6gdz"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.407727 4742 scope.go:117] "RemoveContainer" containerID="69ab6c82edf2ce10d8987c3b5a9194e318538298be58a196bccfa67b05fbaae7"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.411428 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone7678-account-delete-n6gdz"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.428754 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7d76df0-4f21-4729-9729-1f2ff54a8332-kube-api-access-q2smw" (OuterVolumeSpecName: "kube-api-access-q2smw") pod "e7d76df0-4f21-4729-9729-1f2ff54a8332" (UID: "e7d76df0-4f21-4729-9729-1f2ff54a8332"). InnerVolumeSpecName "kube-api-access-q2smw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.439192 4742 scope.go:117] "RemoveContainer" containerID="5ab507fe1c04a7056d68dca7aa4680091e6ec02733336d33099e102c29640adb"
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.443177 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.443645 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.443703 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.444709 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.444749 4742 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovsdb-server"
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.445597 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.448174 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.448327 4742 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovs-vswitchd"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.463211 4742 scope.go:117] "RemoveContainer" containerID="d9012d4468b51b4718118fb879533aaf34f98c7740bbf64d2aadb2a37a988b47"
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.469288 4742 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61c4b9e1_5266_49eb_8348_3b1034562185.slice/crio-483d1bc42de90c15f97297df684f3dc97dd2bc83aa03249cb365cfe7c27d6ae0\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a690523_b1e4_4dd5_b280_58fd8b91b3bf.slice/crio-694419fe80f2a74b277bf90929588ec4d678e3a527f5229dd86c95a2ec6b9190\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a690523_b1e4_4dd5_b280_58fd8b91b3bf.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b2208e7_3101_4090_9f35_fba640d2f1d9.slice/crio-33bbec5e4406ae9f140c27451e1c8802550957395f5f4f57b77c6a8a654b921c\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b2208e7_3101_4090_9f35_fba640d2f1d9.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b956518_9768_477f_9acb_1fc3459427f7.slice\": RecentStats: unable to find data in memory cache]"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.479514 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7d76df0-4f21-4729-9729-1f2ff54a8332" (UID: "e7d76df0-4f21-4729-9729-1f2ff54a8332"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.488675 4742 scope.go:117] "RemoveContainer" containerID="d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.499251 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-plugins\") pod \"7b5d8165-e06e-4600-9cab-9cf84c010725\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.499333 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7b5d8165-e06e-4600-9cab-9cf84c010725-erlang-cookie-secret\") pod \"7b5d8165-e06e-4600-9cab-9cf84c010725\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.499365 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-tls\") pod \"7b5d8165-e06e-4600-9cab-9cf84c010725\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.499396 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"7b5d8165-e06e-4600-9cab-9cf84c010725\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.499447 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-erlang-cookie\") pod \"7b5d8165-e06e-4600-9cab-9cf84c010725\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.499486 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-server-conf\") pod \"7b5d8165-e06e-4600-9cab-9cf84c010725\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.499532 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wd582\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-kube-api-access-wd582\") pod \"7b5d8165-e06e-4600-9cab-9cf84c010725\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.499586 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data\") pod \"7b5d8165-e06e-4600-9cab-9cf84c010725\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.499609 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-plugins-conf\") pod \"7b5d8165-e06e-4600-9cab-9cf84c010725\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.499675 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-confd\") pod \"7b5d8165-e06e-4600-9cab-9cf84c010725\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.499734 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7b5d8165-e06e-4600-9cab-9cf84c010725-pod-info\") pod \"7b5d8165-e06e-4600-9cab-9cf84c010725\" (UID: \"7b5d8165-e06e-4600-9cab-9cf84c010725\") "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.500135 4742 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/672ee527-1fdd-4abc-b327-6f9eb6b07080-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.500154 4742 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e7d76df0-4f21-4729-9729-1f2ff54a8332-ovn-rundir\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.500163 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e7d76df0-4f21-4729-9729-1f2ff54a8332-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.500173 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4nj9\" (UniqueName: \"kubernetes.io/projected/672ee527-1fdd-4abc-b327-6f9eb6b07080-kube-api-access-r4nj9\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.500185 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2smw\" (UniqueName: \"kubernetes.io/projected/e7d76df0-4f21-4729-9729-1f2ff54a8332-kube-api-access-q2smw\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.500194 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.500203 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7d76df0-4f21-4729-9729-1f2ff54a8332-config\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.501083 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "7b5d8165-e06e-4600-9cab-9cf84c010725" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.502546 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "7b5d8165-e06e-4600-9cab-9cf84c010725" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.503045 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "7b5d8165-e06e-4600-9cab-9cf84c010725" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.505095 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-kube-api-access-wd582" (OuterVolumeSpecName: "kube-api-access-wd582") pod "7b5d8165-e06e-4600-9cab-9cf84c010725" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725"). InnerVolumeSpecName "kube-api-access-wd582". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.506145 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "7b5d8165-e06e-4600-9cab-9cf84c010725" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.506782 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b5d8165-e06e-4600-9cab-9cf84c010725-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "7b5d8165-e06e-4600-9cab-9cf84c010725" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.507041 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "7b5d8165-e06e-4600-9cab-9cf84c010725" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.512182 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "e7d76df0-4f21-4729-9729-1f2ff54a8332" (UID: "e7d76df0-4f21-4729-9729-1f2ff54a8332"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.513682 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/7b5d8165-e06e-4600-9cab-9cf84c010725-pod-info" (OuterVolumeSpecName: "pod-info") pod "7b5d8165-e06e-4600-9cab-9cf84c010725" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.515593 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "e7d76df0-4f21-4729-9729-1f2ff54a8332" (UID: "e7d76df0-4f21-4729-9729-1f2ff54a8332"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.523635 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data" (OuterVolumeSpecName: "config-data") pod "7b5d8165-e06e-4600-9cab-9cf84c010725" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.540108 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-server-conf" (OuterVolumeSpecName: "server-conf") pod "7b5d8165-e06e-4600-9cab-9cf84c010725" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.546988 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.548372 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.549581 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.549620 4742 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="a1b7e898-ff4e-4523-8602-18d5937c3e5f" containerName="nova-cell1-conductor-conductor"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.590351 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z77c8"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.598310 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "7b5d8165-e06e-4600-9cab-9cf84c010725" (UID: "7b5d8165-e06e-4600-9cab-9cf84c010725"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.601252 4742 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.601272 4742 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.601282 4742 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-server-conf\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.601291 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wd582\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-kube-api-access-wd582\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.601299 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.601307 4742 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7b5d8165-e06e-4600-9cab-9cf84c010725-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.601314 4742 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.601321 4742 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7b5d8165-e06e-4600-9cab-9cf84c010725-pod-info\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.601331 4742 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.601340 4742 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7b5d8165-e06e-4600-9cab-9cf84c010725-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.601348 4742 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7b5d8165-e06e-4600-9cab-9cf84c010725-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.601369 4742 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" "
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.601377 4742 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e7d76df0-4f21-4729-9729-1f2ff54a8332-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.624807 4742 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.702948 4742 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\""
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.832937 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.838182 4742 scope.go:117] "RemoveContainer" containerID="8e50439d78583b7a689e999179b4561634fceaa140273853e0ddc31e44263528"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.843169 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"]
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.863676 4742 scope.go:117] "RemoveContainer" containerID="d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838"
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.864162 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838\": container with ID starting with d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838 not found: ID does not exist" containerID="d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.864190 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838"} err="failed to get container status \"d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838\": rpc error: code = NotFound desc = could not find container \"d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838\": container with ID starting with d650e05da8f6d00412f038e1cc6a4768171d4ecaec420e4cd6c8c0a06f434838 not found: ID does not exist"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.864209 4742 scope.go:117] "RemoveContainer" containerID="8e50439d78583b7a689e999179b4561634fceaa140273853e0ddc31e44263528"
Dec 05 06:15:25 crc kubenswrapper[4742]: E1205 06:15:25.864420 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e50439d78583b7a689e999179b4561634fceaa140273853e0ddc31e44263528\": container with ID starting with 8e50439d78583b7a689e999179b4561634fceaa140273853e0ddc31e44263528 not found: ID does not exist" containerID="8e50439d78583b7a689e999179b4561634fceaa140273853e0ddc31e44263528"
Dec 05 06:15:25 crc kubenswrapper[4742]: I1205 06:15:25.864441 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e50439d78583b7a689e999179b4561634fceaa140273853e0ddc31e44263528"} err="failed to get container status \"8e50439d78583b7a689e999179b4561634fceaa140273853e0ddc31e44263528\": rpc error: code = NotFound desc = could not find container \"8e50439d78583b7a689e999179b4561634fceaa140273853e0ddc31e44263528\": container with ID starting with 8e50439d78583b7a689e999179b4561634fceaa140273853e0ddc31e44263528 not found: ID does not exist"
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.041680 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.198:3000/\": dial tcp 10.217.0.198:3000: connect: connection refused"
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.045731 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.078465 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-655b696477-tbv7n"
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210026 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data\") pod \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210119 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-credential-keys\") pod \"8d993905-0c76-454d-8eac-8a93674522db\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210140 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-internal-tls-certs\") pod \"8d993905-0c76-454d-8eac-8a93674522db\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210160 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-confd\") pod \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210197 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-plugins-conf\") pod \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210213 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdk76\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-kube-api-access-cdk76\") pod \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210240 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhcz5\" (UniqueName: \"kubernetes.io/projected/8d993905-0c76-454d-8eac-8a93674522db-kube-api-access-qhcz5\") pod \"8d993905-0c76-454d-8eac-8a93674522db\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210262 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-public-tls-certs\") pod \"8d993905-0c76-454d-8eac-8a93674522db\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210281 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-erlang-cookie-secret\") pod \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210305 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-combined-ca-bundle\") pod \"8d993905-0c76-454d-8eac-8a93674522db\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210321 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210338 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-server-conf\") pod \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210373 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-erlang-cookie\") pod \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210395 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-scripts\") pod \"8d993905-0c76-454d-8eac-8a93674522db\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210410 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-tls\") pod \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210445 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-pod-info\") pod \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210466 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-fernet-keys\") pod \"8d993905-0c76-454d-8eac-8a93674522db\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210482 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-config-data\") pod \"8d993905-0c76-454d-8eac-8a93674522db\" (UID: \"8d993905-0c76-454d-8eac-8a93674522db\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.210517 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-plugins\") pod \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\" (UID: \"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e\") "
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.211389 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.212702 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.216241 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.217747 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-scripts" (OuterVolumeSpecName: "scripts") pod "8d993905-0c76-454d-8eac-8a93674522db" (UID: "8d993905-0c76-454d-8eac-8a93674522db"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.217825 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8d993905-0c76-454d-8eac-8a93674522db" (UID: "8d993905-0c76-454d-8eac-8a93674522db"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.218528 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "persistence") pod "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.219035 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.220276 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.222708 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8d993905-0c76-454d-8eac-8a93674522db" (UID: "8d993905-0c76-454d-8eac-8a93674522db"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.222858 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-kube-api-access-cdk76" (OuterVolumeSpecName: "kube-api-access-cdk76") pod "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e"). InnerVolumeSpecName "kube-api-access-cdk76". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.229243 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-pod-info" (OuterVolumeSpecName: "pod-info") pod "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.235549 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d993905-0c76-454d-8eac-8a93674522db-kube-api-access-qhcz5" (OuterVolumeSpecName: "kube-api-access-qhcz5") pod "8d993905-0c76-454d-8eac-8a93674522db" (UID: "8d993905-0c76-454d-8eac-8a93674522db"). InnerVolumeSpecName "kube-api-access-qhcz5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.238983 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data" (OuterVolumeSpecName: "config-data") pod "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.239503 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d993905-0c76-454d-8eac-8a93674522db" (UID: "8d993905-0c76-454d-8eac-8a93674522db"). InnerVolumeSpecName "combined-ca-bundle".
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.250022 4742 generic.go:334] "Generic (PLEG): container finished" podID="d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" containerID="9049700e89ccd644394c6fda74ff3a49949e7dbd626334fac51902707979e0d4" exitCode=0 Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.250136 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e","Type":"ContainerDied","Data":"9049700e89ccd644394c6fda74ff3a49949e7dbd626334fac51902707979e0d4"} Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.250157 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d6b096f4-483e-48c5-a3e1-a178c0c5ae6e","Type":"ContainerDied","Data":"db1336eae52cbd8fcb3aa0e2dd712d4cb75e4737d79ab959c9a2d0267b82f8e4"} Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.250172 4742 scope.go:117] "RemoveContainer" containerID="9049700e89ccd644394c6fda74ff3a49949e7dbd626334fac51902707979e0d4" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.250204 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-config-data" (OuterVolumeSpecName: "config-data") pod "8d993905-0c76-454d-8eac-8a93674522db" (UID: "8d993905-0c76-454d-8eac-8a93674522db"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.250254 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.252625 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e7d76df0-4f21-4729-9729-1f2ff54a8332/ovn-northd/0.log" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.252674 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e7d76df0-4f21-4729-9729-1f2ff54a8332","Type":"ContainerDied","Data":"7ea261b8b9f58f9a8bd26bbf4a01a3d6327f6c7c4ca52ef53507fcb426ec512a"} Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.252741 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.255341 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7b5d8165-e06e-4600-9cab-9cf84c010725","Type":"ContainerDied","Data":"048aa81f305557f2b2564b9bcfec1e2440cbf9794ec21c7ad3b2100a54b456cc"} Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.255448 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.262019 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-655b696477-tbv7n" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.262064 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-655b696477-tbv7n" event={"ID":"8d993905-0c76-454d-8eac-8a93674522db","Type":"ContainerDied","Data":"19b3e95c718c3665ff0670d2d832d7d93791bf4dcc4a5e276fe859096cdb96f7"} Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.262013 4742 generic.go:334] "Generic (PLEG): container finished" podID="8d993905-0c76-454d-8eac-8a93674522db" containerID="19b3e95c718c3665ff0670d2d832d7d93791bf4dcc4a5e276fe859096cdb96f7" exitCode=0 Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.262316 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-655b696477-tbv7n" event={"ID":"8d993905-0c76-454d-8eac-8a93674522db","Type":"ContainerDied","Data":"99f87f31d623370fcc74d863175c5b38300b8dcb1e298344617db859b13f9406"} Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.266217 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8d993905-0c76-454d-8eac-8a93674522db" (UID: "8d993905-0c76-454d-8eac-8a93674522db"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.267758 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-server-conf" (OuterVolumeSpecName: "server-conf") pod "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.286566 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8d993905-0c76-454d-8eac-8a93674522db" (UID: "8d993905-0c76-454d-8eac-8a93674522db"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.298971 4742 scope.go:117] "RemoveContainer" containerID="e37747c80db44fb441e66ad6045bb07df3a0d78b12b382201b8a54d8b6957d0b" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311764 4742 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311788 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311799 4742 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311809 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311818 4742 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311828 4742 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311836 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdk76\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-kube-api-access-cdk76\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311845 4742 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311853 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhcz5\" (UniqueName: \"kubernetes.io/projected/8d993905-0c76-454d-8eac-8a93674522db-kube-api-access-qhcz5\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311863 4742 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311872 4742 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311880 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311911 4742 
reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311920 4742 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311928 4742 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311936 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d993905-0c76-454d-8eac-8a93674522db-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311944 4742 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.311953 4742 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.324669 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" (UID: "d6b096f4-483e-48c5-a3e1-a178c0c5ae6e"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.324894 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.324965 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.328878 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.334392 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.334998 4742 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.351299 4742 scope.go:117] "RemoveContainer" containerID="9049700e89ccd644394c6fda74ff3a49949e7dbd626334fac51902707979e0d4" Dec 05 06:15:26 crc kubenswrapper[4742]: E1205 06:15:26.351686 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9049700e89ccd644394c6fda74ff3a49949e7dbd626334fac51902707979e0d4\": container with ID starting with 9049700e89ccd644394c6fda74ff3a49949e7dbd626334fac51902707979e0d4 not found: ID does not exist" containerID="9049700e89ccd644394c6fda74ff3a49949e7dbd626334fac51902707979e0d4" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.351728 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9049700e89ccd644394c6fda74ff3a49949e7dbd626334fac51902707979e0d4"} err="failed to get container status \"9049700e89ccd644394c6fda74ff3a49949e7dbd626334fac51902707979e0d4\": rpc error: code = NotFound desc = could not find container \"9049700e89ccd644394c6fda74ff3a49949e7dbd626334fac51902707979e0d4\": container with ID starting with 9049700e89ccd644394c6fda74ff3a49949e7dbd626334fac51902707979e0d4 not found: ID does not exist" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.351754 4742 scope.go:117] "RemoveContainer" containerID="e37747c80db44fb441e66ad6045bb07df3a0d78b12b382201b8a54d8b6957d0b" Dec 05 06:15:26 crc kubenswrapper[4742]: E1205 06:15:26.352018 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e37747c80db44fb441e66ad6045bb07df3a0d78b12b382201b8a54d8b6957d0b\": container with ID starting with e37747c80db44fb441e66ad6045bb07df3a0d78b12b382201b8a54d8b6957d0b not found: ID does not exist" containerID="e37747c80db44fb441e66ad6045bb07df3a0d78b12b382201b8a54d8b6957d0b" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.352039 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e37747c80db44fb441e66ad6045bb07df3a0d78b12b382201b8a54d8b6957d0b"} err="failed to get container status \"e37747c80db44fb441e66ad6045bb07df3a0d78b12b382201b8a54d8b6957d0b\": rpc error: code = NotFound desc = could not find container \"e37747c80db44fb441e66ad6045bb07df3a0d78b12b382201b8a54d8b6957d0b\": container with ID starting with e37747c80db44fb441e66ad6045bb07df3a0d78b12b382201b8a54d8b6957d0b not found: ID does not exist" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.352065 4742 scope.go:117] "RemoveContainer" containerID="d1934753cd07a71a87a5b0d5f8a6aecf1b11e12621dc578d17d0fc95dbb8f143" 
Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.373429 4742 scope.go:117] "RemoveContainer" containerID="caa9cfd6937fda940888bb64cbccaa6adf27580ea4e177e3d3adf4b5e4e8b93d" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.390866 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a690523-b1e4-4dd5-b280-58fd8b91b3bf" path="/var/lib/kubelet/pods/1a690523-b1e4-4dd5-b280-58fd8b91b3bf/volumes" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.391231 4742 scope.go:117] "RemoveContainer" containerID="4c9ed2559817c2da1b28311959a187477072585e1e74ef4ffe26d1ce23f9ee55" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.391538 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b2208e7-3101-4090-9f35-fba640d2f1d9" path="/var/lib/kubelet/pods/5b2208e7-3101-4090-9f35-fba640d2f1d9/volumes" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.391870 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="672ee527-1fdd-4abc-b327-6f9eb6b07080" path="/var/lib/kubelet/pods/672ee527-1fdd-4abc-b327-6f9eb6b07080/volumes" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.392370 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" path="/var/lib/kubelet/pods/7038cd99-8151-4157-93c6-3b7f5b9ce25e/volumes" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.393572 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b5d8165-e06e-4600-9cab-9cf84c010725" path="/var/lib/kubelet/pods/7b5d8165-e06e-4600-9cab-9cf84c010725/volumes" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.394136 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa702931-d853-4f8b-b0d8-58f5476bb7c2" path="/var/lib/kubelet/pods/aa702931-d853-4f8b-b0d8-58f5476bb7c2/volumes" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.395294 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4ba4170-0240-42d9-85f4-cf3587f39f02" path="/var/lib/kubelet/pods/c4ba4170-0240-42d9-85f4-cf3587f39f02/volumes" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.395963 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7a764d5-447f-483d-b819-0e398e749600" path="/var/lib/kubelet/pods/d7a764d5-447f-483d-b819-0e398e749600/volumes" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.396596 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e42757b3-029e-4fe9-917f-73331394524e" path="/var/lib/kubelet/pods/e42757b3-029e-4fe9-917f-73331394524e/volumes" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.397551 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6063f78-1b45-493e-ae25-62239a1ed5e3" path="/var/lib/kubelet/pods/e6063f78-1b45-493e-ae25-62239a1ed5e3/volumes" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.398311 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7d76df0-4f21-4729-9729-1f2ff54a8332" path="/var/lib/kubelet/pods/e7d76df0-4f21-4729-9729-1f2ff54a8332/volumes" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.398856 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9974486-076d-4493-af32-a08eef334572" path="/var/lib/kubelet/pods/e9974486-076d-4493-af32-a08eef334572/volumes" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.399735 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb4dce96-8228-455b-9edc-37a62af6e732" 
path="/var/lib/kubelet/pods/fb4dce96-8228-455b-9edc-37a62af6e732/volumes" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.417626 4742 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.417987 4742 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.426158 4742 scope.go:117] "RemoveContainer" containerID="2a73506fa683772c445e145b7336056dc8c87df69830067f0bc2e973540b7546" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.456504 4742 scope.go:117] "RemoveContainer" containerID="19b3e95c718c3665ff0670d2d832d7d93791bf4dcc4a5e276fe859096cdb96f7" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.481840 4742 scope.go:117] "RemoveContainer" containerID="19b3e95c718c3665ff0670d2d832d7d93791bf4dcc4a5e276fe859096cdb96f7" Dec 05 06:15:26 crc kubenswrapper[4742]: E1205 06:15:26.482363 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19b3e95c718c3665ff0670d2d832d7d93791bf4dcc4a5e276fe859096cdb96f7\": container with ID starting with 19b3e95c718c3665ff0670d2d832d7d93791bf4dcc4a5e276fe859096cdb96f7 not found: ID does not exist" containerID="19b3e95c718c3665ff0670d2d832d7d93791bf4dcc4a5e276fe859096cdb96f7" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.482408 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19b3e95c718c3665ff0670d2d832d7d93791bf4dcc4a5e276fe859096cdb96f7"} err="failed to get container status \"19b3e95c718c3665ff0670d2d832d7d93791bf4dcc4a5e276fe859096cdb96f7\": rpc error: code = NotFound desc = could not find container \"19b3e95c718c3665ff0670d2d832d7d93791bf4dcc4a5e276fe859096cdb96f7\": container with ID starting with 19b3e95c718c3665ff0670d2d832d7d93791bf4dcc4a5e276fe859096cdb96f7 not found: ID does not exist" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.552502 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-bc83-account-create-update-6t2vt"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.560092 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glancebc83-account-delete-wqtlb"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.603374 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-5llvc"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.611890 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-bc83-account-create-update-6t2vt"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.642136 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glancebc83-account-delete-wqtlb"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.668124 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-5llvc"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.683116 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-vjmxb"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.709117 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-vjmxb"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 
06:15:26.719390 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-edde-account-create-update-nwbxj"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.745444 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinderedde-account-delete-pwsvp"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.763842 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-edde-account-create-update-nwbxj"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.778118 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinderedde-account-delete-pwsvp"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.790157 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-655b696477-tbv7n"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.806109 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-655b696477-tbv7n"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.823113 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.836238 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.852332 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-2c8pq"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.855108 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-2c8pq"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.860110 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron5be2-account-delete-l9sfl"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.864560 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron5be2-account-delete-l9sfl"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.869430 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5be2-account-create-update-tntgt"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.874428 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5be2-account-create-update-tntgt"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.883389 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.947205 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-ncssk"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.964607 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-ncssk"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.970694 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-f9da-account-create-update-zhzzh"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.976111 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-f9da-account-create-update-zhzzh"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.979187 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbicanf9da-account-delete-4bv5f"] Dec 05 06:15:26 crc kubenswrapper[4742]: I1205 06:15:26.983228 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbicanf9da-account-delete-4bv5f"] Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.037660 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-mlg7m"] Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.043469 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1b7e898-ff4e-4523-8602-18d5937c3e5f-config-data\") pod \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\" (UID: \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.043510 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b7e898-ff4e-4523-8602-18d5937c3e5f-combined-ca-bundle\") pod \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\" (UID: \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.043631 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2k29j\" (UniqueName: \"kubernetes.io/projected/a1b7e898-ff4e-4523-8602-18d5937c3e5f-kube-api-access-2k29j\") pod \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\" (UID: \"a1b7e898-ff4e-4523-8602-18d5937c3e5f\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.048032 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-mlg7m"] Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.048490 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1b7e898-ff4e-4523-8602-18d5937c3e5f-kube-api-access-2k29j" (OuterVolumeSpecName: "kube-api-access-2k29j") pod "a1b7e898-ff4e-4523-8602-18d5937c3e5f" (UID: "a1b7e898-ff4e-4523-8602-18d5937c3e5f"). InnerVolumeSpecName "kube-api-access-2k29j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.066432 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement9800-account-delete-mlmb7"] Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.070679 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1b7e898-ff4e-4523-8602-18d5937c3e5f-config-data" (OuterVolumeSpecName: "config-data") pod "a1b7e898-ff4e-4523-8602-18d5937c3e5f" (UID: "a1b7e898-ff4e-4523-8602-18d5937c3e5f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.071498 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-9800-account-create-update-gpgxj"] Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.079919 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement9800-account-delete-mlmb7"] Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.082836 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1b7e898-ff4e-4523-8602-18d5937c3e5f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1b7e898-ff4e-4523-8602-18d5937c3e5f" (UID: "a1b7e898-ff4e-4523-8602-18d5937c3e5f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.085465 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-9800-account-create-update-gpgxj"] Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.145017 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1b7e898-ff4e-4523-8602-18d5937c3e5f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.145049 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1b7e898-ff4e-4523-8602-18d5937c3e5f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.145144 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2k29j\" (UniqueName: \"kubernetes.io/projected/a1b7e898-ff4e-4523-8602-18d5937c3e5f-kube-api-access-2k29j\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.245989 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-66f7f988b5-b5pzf" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.286402 4742 generic.go:334] "Generic (PLEG): container finished" podID="2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" containerID="2f4156cfc6ee4ae12fa0ce4c17f20f7d82287d71fe307700ba39947bbecf3c02" exitCode=0 Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.286475 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" event={"ID":"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9","Type":"ContainerDied","Data":"2f4156cfc6ee4ae12fa0ce4c17f20f7d82287d71fe307700ba39947bbecf3c02"} Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.297323 4742 generic.go:334] "Generic (PLEG): container finished" podID="fa30e851-f383-42c0-9e09-d8c896ed77ad" containerID="c6a468d9df31190dc440d8ef2e011bdc021d2f9b7a5c87349652c7adab9aea44" exitCode=0 Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.297452 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-66f7f988b5-b5pzf" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.297553 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-66f7f988b5-b5pzf" event={"ID":"fa30e851-f383-42c0-9e09-d8c896ed77ad","Type":"ContainerDied","Data":"c6a468d9df31190dc440d8ef2e011bdc021d2f9b7a5c87349652c7adab9aea44"} Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.297622 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-66f7f988b5-b5pzf" event={"ID":"fa30e851-f383-42c0-9e09-d8c896ed77ad","Type":"ContainerDied","Data":"399de0c84bd1ba34d951be5e96866f611d5d76be3f684cfa27b1c3935f262f81"} Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.297646 4742 scope.go:117] "RemoveContainer" containerID="c6a468d9df31190dc440d8ef2e011bdc021d2f9b7a5c87349652c7adab9aea44" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.317686 4742 generic.go:334] "Generic (PLEG): container finished" podID="a1b7e898-ff4e-4523-8602-18d5937c3e5f" containerID="37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e" exitCode=0 Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.317977 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-z77c8" podUID="439adfba-ae29-4db5-8a77-88eede9d0bd9" containerName="registry-server" containerID="cri-o://c5027492da075e5f7eb6f411b5685735804aba663f81c22f48a7032edb179797" gracePeriod=2 Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.318474 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a1b7e898-ff4e-4523-8602-18d5937c3e5f","Type":"ContainerDied","Data":"37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e"} Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.318504 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"a1b7e898-ff4e-4523-8602-18d5937c3e5f","Type":"ContainerDied","Data":"88478d3ec4837dfb5acf43c989b2eebe0430c67c0ea3d138798bc84335221a76"} Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.318580 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.348810 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-config-data-custom\") pod \"fa30e851-f383-42c0-9e09-d8c896ed77ad\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.348884 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56xlt\" (UniqueName: \"kubernetes.io/projected/fa30e851-f383-42c0-9e09-d8c896ed77ad-kube-api-access-56xlt\") pod \"fa30e851-f383-42c0-9e09-d8c896ed77ad\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.348920 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-combined-ca-bundle\") pod \"fa30e851-f383-42c0-9e09-d8c896ed77ad\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.348966 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-config-data\") pod \"fa30e851-f383-42c0-9e09-d8c896ed77ad\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.349156 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa30e851-f383-42c0-9e09-d8c896ed77ad-logs\") pod \"fa30e851-f383-42c0-9e09-d8c896ed77ad\" (UID: \"fa30e851-f383-42c0-9e09-d8c896ed77ad\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.350896 4742 scope.go:117] "RemoveContainer" containerID="9859f27d8f4bb1f62c35ce8e77ebd26f8aca7e99762a5eb2b0f4ad252e8f2430" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.352048 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa30e851-f383-42c0-9e09-d8c896ed77ad-logs" (OuterVolumeSpecName: "logs") pod "fa30e851-f383-42c0-9e09-d8c896ed77ad" (UID: "fa30e851-f383-42c0-9e09-d8c896ed77ad"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.352294 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa30e851-f383-42c0-9e09-d8c896ed77ad-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.367263 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fa30e851-f383-42c0-9e09-d8c896ed77ad" (UID: "fa30e851-f383-42c0-9e09-d8c896ed77ad"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.381432 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa30e851-f383-42c0-9e09-d8c896ed77ad-kube-api-access-56xlt" (OuterVolumeSpecName: "kube-api-access-56xlt") pod "fa30e851-f383-42c0-9e09-d8c896ed77ad" (UID: "fa30e851-f383-42c0-9e09-d8c896ed77ad"). InnerVolumeSpecName "kube-api-access-56xlt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.382076 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.383219 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa30e851-f383-42c0-9e09-d8c896ed77ad" (UID: "fa30e851-f383-42c0-9e09-d8c896ed77ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.387722 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.388137 4742 scope.go:117] "RemoveContainer" containerID="c6a468d9df31190dc440d8ef2e011bdc021d2f9b7a5c87349652c7adab9aea44" Dec 05 06:15:27 crc kubenswrapper[4742]: E1205 06:15:27.388603 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6a468d9df31190dc440d8ef2e011bdc021d2f9b7a5c87349652c7adab9aea44\": container with ID starting with c6a468d9df31190dc440d8ef2e011bdc021d2f9b7a5c87349652c7adab9aea44 not found: ID does not exist" containerID="c6a468d9df31190dc440d8ef2e011bdc021d2f9b7a5c87349652c7adab9aea44" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.388632 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6a468d9df31190dc440d8ef2e011bdc021d2f9b7a5c87349652c7adab9aea44"} err="failed to get container status \"c6a468d9df31190dc440d8ef2e011bdc021d2f9b7a5c87349652c7adab9aea44\": rpc error: code = NotFound desc = could not find container \"c6a468d9df31190dc440d8ef2e011bdc021d2f9b7a5c87349652c7adab9aea44\": container with ID starting with c6a468d9df31190dc440d8ef2e011bdc021d2f9b7a5c87349652c7adab9aea44 not found: ID does not exist" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.388651 4742 scope.go:117] "RemoveContainer" containerID="9859f27d8f4bb1f62c35ce8e77ebd26f8aca7e99762a5eb2b0f4ad252e8f2430" Dec 05 06:15:27 crc kubenswrapper[4742]: E1205 06:15:27.389591 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9859f27d8f4bb1f62c35ce8e77ebd26f8aca7e99762a5eb2b0f4ad252e8f2430\": container with ID starting with 9859f27d8f4bb1f62c35ce8e77ebd26f8aca7e99762a5eb2b0f4ad252e8f2430 not found: ID does not exist" containerID="9859f27d8f4bb1f62c35ce8e77ebd26f8aca7e99762a5eb2b0f4ad252e8f2430" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.389610 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9859f27d8f4bb1f62c35ce8e77ebd26f8aca7e99762a5eb2b0f4ad252e8f2430"} err="failed to get container status \"9859f27d8f4bb1f62c35ce8e77ebd26f8aca7e99762a5eb2b0f4ad252e8f2430\": rpc error: code = NotFound desc = could not find container \"9859f27d8f4bb1f62c35ce8e77ebd26f8aca7e99762a5eb2b0f4ad252e8f2430\": container with ID starting with 9859f27d8f4bb1f62c35ce8e77ebd26f8aca7e99762a5eb2b0f4ad252e8f2430 not found: ID does not exist" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.389625 4742 scope.go:117] "RemoveContainer" containerID="37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.413741 4742 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-config-data" (OuterVolumeSpecName: "config-data") pod "fa30e851-f383-42c0-9e09-d8c896ed77ad" (UID: "fa30e851-f383-42c0-9e09-d8c896ed77ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.445076 4742 scope.go:117] "RemoveContainer" containerID="37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e" Dec 05 06:15:27 crc kubenswrapper[4742]: E1205 06:15:27.445410 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e\": container with ID starting with 37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e not found: ID does not exist" containerID="37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.445439 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e"} err="failed to get container status \"37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e\": rpc error: code = NotFound desc = could not find container \"37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e\": container with ID starting with 37ce92fb4c8b7e7a04ce6e6e4b58a8a399c55f7403d7878e536352ac8232319e not found: ID does not exist" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.447659 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.455026 4742 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.455184 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56xlt\" (UniqueName: \"kubernetes.io/projected/fa30e851-f383-42c0-9e09-d8c896ed77ad-kube-api-access-56xlt\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.455264 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.455337 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa30e851-f383-42c0-9e09-d8c896ed77ad-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.560483 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-config-data\") pod \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.560553 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-logs\") pod \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\" (UID: 
\"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.560621 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c289g\" (UniqueName: \"kubernetes.io/projected/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-kube-api-access-c289g\") pod \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.560679 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-combined-ca-bundle\") pod \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.560771 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-config-data-custom\") pod \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\" (UID: \"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.563459 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-logs" (OuterVolumeSpecName: "logs") pod "2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" (UID: "2f4c3ae5-d78c-4ddb-953d-cbee5b815be9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.566228 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-kube-api-access-c289g" (OuterVolumeSpecName: "kube-api-access-c289g") pod "2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" (UID: "2f4c3ae5-d78c-4ddb-953d-cbee5b815be9"). InnerVolumeSpecName "kube-api-access-c289g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.575926 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" (UID: "2f4c3ae5-d78c-4ddb-953d-cbee5b815be9"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.591942 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" (UID: "2f4c3ae5-d78c-4ddb-953d-cbee5b815be9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.637478 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": dial tcp 10.217.0.201:8775: i/o timeout" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.638118 4742 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.201:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.650588 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-config-data" (OuterVolumeSpecName: "config-data") pod "2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" (UID: "2f4c3ae5-d78c-4ddb-953d-cbee5b815be9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.663827 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c289g\" (UniqueName: \"kubernetes.io/projected/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-kube-api-access-c289g\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.663865 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.663874 4742 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.663883 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.663894 4742 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9-logs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.702790 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-66f7f988b5-b5pzf"] Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.708744 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-66f7f988b5-b5pzf"] Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.777165 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z77c8" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.967139 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d74mn\" (UniqueName: \"kubernetes.io/projected/439adfba-ae29-4db5-8a77-88eede9d0bd9-kube-api-access-d74mn\") pod \"439adfba-ae29-4db5-8a77-88eede9d0bd9\" (UID: \"439adfba-ae29-4db5-8a77-88eede9d0bd9\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.967322 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/439adfba-ae29-4db5-8a77-88eede9d0bd9-catalog-content\") pod \"439adfba-ae29-4db5-8a77-88eede9d0bd9\" (UID: \"439adfba-ae29-4db5-8a77-88eede9d0bd9\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.967345 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/439adfba-ae29-4db5-8a77-88eede9d0bd9-utilities\") pod \"439adfba-ae29-4db5-8a77-88eede9d0bd9\" (UID: \"439adfba-ae29-4db5-8a77-88eede9d0bd9\") " Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.968334 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/439adfba-ae29-4db5-8a77-88eede9d0bd9-utilities" (OuterVolumeSpecName: "utilities") pod "439adfba-ae29-4db5-8a77-88eede9d0bd9" (UID: "439adfba-ae29-4db5-8a77-88eede9d0bd9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:27 crc kubenswrapper[4742]: I1205 06:15:27.970316 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/439adfba-ae29-4db5-8a77-88eede9d0bd9-kube-api-access-d74mn" (OuterVolumeSpecName: "kube-api-access-d74mn") pod "439adfba-ae29-4db5-8a77-88eede9d0bd9" (UID: "439adfba-ae29-4db5-8a77-88eede9d0bd9"). InnerVolumeSpecName "kube-api-access-d74mn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.068932 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d74mn\" (UniqueName: \"kubernetes.io/projected/439adfba-ae29-4db5-8a77-88eede9d0bd9-kube-api-access-d74mn\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.068972 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/439adfba-ae29-4db5-8a77-88eede9d0bd9-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.076395 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/439adfba-ae29-4db5-8a77-88eede9d0bd9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "439adfba-ae29-4db5-8a77-88eede9d0bd9" (UID: "439adfba-ae29-4db5-8a77-88eede9d0bd9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.170542 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/439adfba-ae29-4db5-8a77-88eede9d0bd9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.215915 4742 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.330570 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5" event={"ID":"2f4c3ae5-d78c-4ddb-953d-cbee5b815be9","Type":"ContainerDied","Data":"1d7c4f5e9ea46946dded5d22e0e6596417c3c84bd505c0da508bce48c06d7f74"}
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.330893 4742 scope.go:117] "RemoveContainer" containerID="2f4156cfc6ee4ae12fa0ce4c17f20f7d82287d71fe307700ba39947bbecf3c02"
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.330600 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7bd94f978b-h9cm5"
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.335134 4742 generic.go:334] "Generic (PLEG): container finished" podID="439adfba-ae29-4db5-8a77-88eede9d0bd9" containerID="c5027492da075e5f7eb6f411b5685735804aba663f81c22f48a7032edb179797" exitCode=0
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.335217 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z77c8" event={"ID":"439adfba-ae29-4db5-8a77-88eede9d0bd9","Type":"ContainerDied","Data":"c5027492da075e5f7eb6f411b5685735804aba663f81c22f48a7032edb179797"}
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.335249 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z77c8" event={"ID":"439adfba-ae29-4db5-8a77-88eede9d0bd9","Type":"ContainerDied","Data":"8b89c9f2b2504d5c4cee100630a761ee3633b3e0923a5d4ea3c4d2cf35dd3a16"}
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.335324 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z77c8"
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.341632 4742 generic.go:334] "Generic (PLEG): container finished" podID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerID="4c2c76854fdd4144c4cb1d5dac7df46a88805bb483cbfd5411dfc86a014d19ea" exitCode=0
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.341760 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e","Type":"ContainerDied","Data":"4c2c76854fdd4144c4cb1d5dac7df46a88805bb483cbfd5411dfc86a014d19ea"}
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.341794 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e","Type":"ContainerDied","Data":"db7aa95e95eca89e07a6046673364dfc59b0c8e13e9d25dc044cc9bebbcca098"}
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.341867 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.357834 4742 scope.go:117] "RemoveContainer" containerID="3710b941ebb6d9eff059006febf49493e12500d9cc0dc124f356193d93849a0b"
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.367677 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-7bd94f978b-h9cm5"]
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.374351 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-scripts\") pod \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") "
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.374471 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-log-httpd\") pod \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") "
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.374573 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-ceilometer-tls-certs\") pod \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") "
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.374627 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmgd9\" (UniqueName: \"kubernetes.io/projected/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-kube-api-access-lmgd9\") pod \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") "
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.374674 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-run-httpd\") pod \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") "
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.374715 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-sg-core-conf-yaml\") pod \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") "
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.374823 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-config-data\") pod \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") "
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.374864 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-combined-ca-bundle\") pod \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\" (UID: \"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e\") "
Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.375621 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" (UID: "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
"20ae73b5-51f4-4bcf-ba9c-c35f566cd07e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.376142 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" (UID: "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.385922 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-scripts" (OuterVolumeSpecName: "scripts") pod "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" (UID: "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.385971 4742 scope.go:117] "RemoveContainer" containerID="c5027492da075e5f7eb6f411b5685735804aba663f81c22f48a7032edb179797" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.389291 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-kube-api-access-lmgd9" (OuterVolumeSpecName: "kube-api-access-lmgd9") pod "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" (UID: "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e"). InnerVolumeSpecName "kube-api-access-lmgd9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.399113 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d395b5f-0ac7-4a77-ac68-27bc1b40915f" path="/var/lib/kubelet/pods/1d395b5f-0ac7-4a77-ac68-27bc1b40915f/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.400246 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0" path="/var/lib/kubelet/pods/1ee0940e-e4c9-45cb-a6a6-c27f1c5b30c0/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.401262 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2eb42b8d-3238-4559-99c7-92255d22f81a" path="/var/lib/kubelet/pods/2eb42b8d-3238-4559-99c7-92255d22f81a/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.402337 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad" path="/var/lib/kubelet/pods/4730dc9f-0e9d-41b9-ab01-9f8ae715f3ad/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.404230 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49b98e00-ff84-46a9-b808-262b1246348c" path="/var/lib/kubelet/pods/49b98e00-ff84-46a9-b808-262b1246348c/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.405281 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e3a818a-cec4-45bb-9da7-5f26059045a8" path="/var/lib/kubelet/pods/4e3a818a-cec4-45bb-9da7-5f26059045a8/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.406634 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f2ab762-07a0-426d-a84a-a53ad7e2fef0" path="/var/lib/kubelet/pods/4f2ab762-07a0-426d-a84a-a53ad7e2fef0/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.408596 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="606e80d6-e92c-4f5d-9806-33e538679939" 
path="/var/lib/kubelet/pods/606e80d6-e92c-4f5d-9806-33e538679939/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.409641 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61c4b9e1-5266-49eb-8348-3b1034562185" path="/var/lib/kubelet/pods/61c4b9e1-5266-49eb-8348-3b1034562185/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.411081 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a5ca1f6-73b0-43da-82c6-995495666585" path="/var/lib/kubelet/pods/6a5ca1f6-73b0-43da-82c6-995495666585/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.411968 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6" path="/var/lib/kubelet/pods/6bfd6781-fa3f-4cc4-9ba9-9dd169d44af6/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.413219 4742 scope.go:117] "RemoveContainer" containerID="69825182fb26d31bf0e53f36361245e9235ba3311974d988fd1997b2ec68f6cb" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.416929 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b956518-9768-477f-9acb-1fc3459427f7" path="/var/lib/kubelet/pods/8b956518-9768-477f-9acb-1fc3459427f7/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.417591 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d993905-0c76-454d-8eac-8a93674522db" path="/var/lib/kubelet/pods/8d993905-0c76-454d-8eac-8a93674522db/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.418230 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93dd367f-1105-4615-9562-2d9ad648e7a9" path="/var/lib/kubelet/pods/93dd367f-1105-4615-9562-2d9ad648e7a9/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.419338 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" (UID: "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.424340 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1b7e898-ff4e-4523-8602-18d5937c3e5f" path="/var/lib/kubelet/pods/a1b7e898-ff4e-4523-8602-18d5937c3e5f/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.424958 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4227032-1b4c-4059-b91f-cf5ece6b20b2" path="/var/lib/kubelet/pods/c4227032-1b4c-4059-b91f-cf5ece6b20b2/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.425928 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d380e842-910a-443b-aa5e-151fa1fe43ea" path="/var/lib/kubelet/pods/d380e842-910a-443b-aa5e-151fa1fe43ea/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.437307 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" path="/var/lib/kubelet/pods/d6b096f4-483e-48c5-a3e1-a178c0c5ae6e/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.438097 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa30e851-f383-42c0-9e09-d8c896ed77ad" path="/var/lib/kubelet/pods/fa30e851-f383-42c0-9e09-d8c896ed77ad/volumes" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.446474 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-7bd94f978b-h9cm5"] Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.446528 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z77c8"] Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.446544 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z77c8"] Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.464733 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" (UID: "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.467587 4742 scope.go:117] "RemoveContainer" containerID="638f39f3b69197ade380ce0500b746ede74a9207587b901a2b07034aa6f65aef" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.476938 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.476975 4742 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.476988 4742 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.477000 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmgd9\" (UniqueName: \"kubernetes.io/projected/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-kube-api-access-lmgd9\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.477284 4742 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.477332 4742 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.481111 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-config-data" (OuterVolumeSpecName: "config-data") pod "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" (UID: "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.481691 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" (UID: "20ae73b5-51f4-4bcf-ba9c-c35f566cd07e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.489241 4742 scope.go:117] "RemoveContainer" containerID="c5027492da075e5f7eb6f411b5685735804aba663f81c22f48a7032edb179797" Dec 05 06:15:28 crc kubenswrapper[4742]: E1205 06:15:28.489650 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5027492da075e5f7eb6f411b5685735804aba663f81c22f48a7032edb179797\": container with ID starting with c5027492da075e5f7eb6f411b5685735804aba663f81c22f48a7032edb179797 not found: ID does not exist" containerID="c5027492da075e5f7eb6f411b5685735804aba663f81c22f48a7032edb179797" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.489694 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5027492da075e5f7eb6f411b5685735804aba663f81c22f48a7032edb179797"} err="failed to get container status \"c5027492da075e5f7eb6f411b5685735804aba663f81c22f48a7032edb179797\": rpc error: code = NotFound desc = could not find container \"c5027492da075e5f7eb6f411b5685735804aba663f81c22f48a7032edb179797\": container with ID starting with c5027492da075e5f7eb6f411b5685735804aba663f81c22f48a7032edb179797 not found: ID does not exist" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.489723 4742 scope.go:117] "RemoveContainer" containerID="69825182fb26d31bf0e53f36361245e9235ba3311974d988fd1997b2ec68f6cb" Dec 05 06:15:28 crc kubenswrapper[4742]: E1205 06:15:28.490073 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69825182fb26d31bf0e53f36361245e9235ba3311974d988fd1997b2ec68f6cb\": container with ID starting with 69825182fb26d31bf0e53f36361245e9235ba3311974d988fd1997b2ec68f6cb not found: ID does not exist" containerID="69825182fb26d31bf0e53f36361245e9235ba3311974d988fd1997b2ec68f6cb" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.490094 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69825182fb26d31bf0e53f36361245e9235ba3311974d988fd1997b2ec68f6cb"} err="failed to get container status \"69825182fb26d31bf0e53f36361245e9235ba3311974d988fd1997b2ec68f6cb\": rpc error: code = NotFound desc = could not find container \"69825182fb26d31bf0e53f36361245e9235ba3311974d988fd1997b2ec68f6cb\": container with ID starting with 69825182fb26d31bf0e53f36361245e9235ba3311974d988fd1997b2ec68f6cb not found: ID does not exist" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.490107 4742 scope.go:117] "RemoveContainer" containerID="638f39f3b69197ade380ce0500b746ede74a9207587b901a2b07034aa6f65aef" Dec 05 06:15:28 crc kubenswrapper[4742]: E1205 06:15:28.490396 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"638f39f3b69197ade380ce0500b746ede74a9207587b901a2b07034aa6f65aef\": container with ID starting with 638f39f3b69197ade380ce0500b746ede74a9207587b901a2b07034aa6f65aef not found: ID does not exist" containerID="638f39f3b69197ade380ce0500b746ede74a9207587b901a2b07034aa6f65aef" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.490443 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"638f39f3b69197ade380ce0500b746ede74a9207587b901a2b07034aa6f65aef"} err="failed to get container status \"638f39f3b69197ade380ce0500b746ede74a9207587b901a2b07034aa6f65aef\": rpc error: code = NotFound desc = could not 
find container \"638f39f3b69197ade380ce0500b746ede74a9207587b901a2b07034aa6f65aef\": container with ID starting with 638f39f3b69197ade380ce0500b746ede74a9207587b901a2b07034aa6f65aef not found: ID does not exist" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.490473 4742 scope.go:117] "RemoveContainer" containerID="2ecbc57c9c7699122ac359af92d4cd9ed8008de0c52d7893deb3493891be0b73" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.562174 4742 scope.go:117] "RemoveContainer" containerID="a39fff3b53abe8f6e2a0ba533233662c0b7fe156e685ced3f6efd8f2c79f2d9f" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.579472 4742 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.579526 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.587009 4742 scope.go:117] "RemoveContainer" containerID="4c2c76854fdd4144c4cb1d5dac7df46a88805bb483cbfd5411dfc86a014d19ea" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.607730 4742 scope.go:117] "RemoveContainer" containerID="68c5adefa894fd4e4d4330c2dd3417e387b076c9891ece3a5627225e0d0c2d97" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.627257 4742 scope.go:117] "RemoveContainer" containerID="2ecbc57c9c7699122ac359af92d4cd9ed8008de0c52d7893deb3493891be0b73" Dec 05 06:15:28 crc kubenswrapper[4742]: E1205 06:15:28.627810 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ecbc57c9c7699122ac359af92d4cd9ed8008de0c52d7893deb3493891be0b73\": container with ID starting with 2ecbc57c9c7699122ac359af92d4cd9ed8008de0c52d7893deb3493891be0b73 not found: ID does not exist" containerID="2ecbc57c9c7699122ac359af92d4cd9ed8008de0c52d7893deb3493891be0b73" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.627847 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ecbc57c9c7699122ac359af92d4cd9ed8008de0c52d7893deb3493891be0b73"} err="failed to get container status \"2ecbc57c9c7699122ac359af92d4cd9ed8008de0c52d7893deb3493891be0b73\": rpc error: code = NotFound desc = could not find container \"2ecbc57c9c7699122ac359af92d4cd9ed8008de0c52d7893deb3493891be0b73\": container with ID starting with 2ecbc57c9c7699122ac359af92d4cd9ed8008de0c52d7893deb3493891be0b73 not found: ID does not exist" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.627874 4742 scope.go:117] "RemoveContainer" containerID="a39fff3b53abe8f6e2a0ba533233662c0b7fe156e685ced3f6efd8f2c79f2d9f" Dec 05 06:15:28 crc kubenswrapper[4742]: E1205 06:15:28.628361 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a39fff3b53abe8f6e2a0ba533233662c0b7fe156e685ced3f6efd8f2c79f2d9f\": container with ID starting with a39fff3b53abe8f6e2a0ba533233662c0b7fe156e685ced3f6efd8f2c79f2d9f not found: ID does not exist" containerID="a39fff3b53abe8f6e2a0ba533233662c0b7fe156e685ced3f6efd8f2c79f2d9f" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.628395 4742 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a39fff3b53abe8f6e2a0ba533233662c0b7fe156e685ced3f6efd8f2c79f2d9f"} err="failed to get container status \"a39fff3b53abe8f6e2a0ba533233662c0b7fe156e685ced3f6efd8f2c79f2d9f\": rpc error: code = NotFound desc = could not find container \"a39fff3b53abe8f6e2a0ba533233662c0b7fe156e685ced3f6efd8f2c79f2d9f\": container with ID starting with a39fff3b53abe8f6e2a0ba533233662c0b7fe156e685ced3f6efd8f2c79f2d9f not found: ID does not exist" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.628414 4742 scope.go:117] "RemoveContainer" containerID="4c2c76854fdd4144c4cb1d5dac7df46a88805bb483cbfd5411dfc86a014d19ea" Dec 05 06:15:28 crc kubenswrapper[4742]: E1205 06:15:28.628738 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c2c76854fdd4144c4cb1d5dac7df46a88805bb483cbfd5411dfc86a014d19ea\": container with ID starting with 4c2c76854fdd4144c4cb1d5dac7df46a88805bb483cbfd5411dfc86a014d19ea not found: ID does not exist" containerID="4c2c76854fdd4144c4cb1d5dac7df46a88805bb483cbfd5411dfc86a014d19ea" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.628768 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c2c76854fdd4144c4cb1d5dac7df46a88805bb483cbfd5411dfc86a014d19ea"} err="failed to get container status \"4c2c76854fdd4144c4cb1d5dac7df46a88805bb483cbfd5411dfc86a014d19ea\": rpc error: code = NotFound desc = could not find container \"4c2c76854fdd4144c4cb1d5dac7df46a88805bb483cbfd5411dfc86a014d19ea\": container with ID starting with 4c2c76854fdd4144c4cb1d5dac7df46a88805bb483cbfd5411dfc86a014d19ea not found: ID does not exist" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.628786 4742 scope.go:117] "RemoveContainer" containerID="68c5adefa894fd4e4d4330c2dd3417e387b076c9891ece3a5627225e0d0c2d97" Dec 05 06:15:28 crc kubenswrapper[4742]: E1205 06:15:28.629099 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68c5adefa894fd4e4d4330c2dd3417e387b076c9891ece3a5627225e0d0c2d97\": container with ID starting with 68c5adefa894fd4e4d4330c2dd3417e387b076c9891ece3a5627225e0d0c2d97 not found: ID does not exist" containerID="68c5adefa894fd4e4d4330c2dd3417e387b076c9891ece3a5627225e0d0c2d97" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.629129 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68c5adefa894fd4e4d4330c2dd3417e387b076c9891ece3a5627225e0d0c2d97"} err="failed to get container status \"68c5adefa894fd4e4d4330c2dd3417e387b076c9891ece3a5627225e0d0c2d97\": rpc error: code = NotFound desc = could not find container \"68c5adefa894fd4e4d4330c2dd3417e387b076c9891ece3a5627225e0d0c2d97\": container with ID starting with 68c5adefa894fd4e4d4330c2dd3417e387b076c9891ece3a5627225e0d0c2d97 not found: ID does not exist" Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.686190 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:15:28 crc kubenswrapper[4742]: I1205 06:15:28.702693 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.326764 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.377534 4742 generic.go:334] "Generic (PLEG): container finished" podID="65eece87-4279-4d6c-b2a6-5841fd5b3298" containerID="0f5251869335092c27ec478b7850c0b761a69b72c601c2774db88367eee9ef81" exitCode=0 Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.377584 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-574f89688c-hbh7m" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.377627 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-574f89688c-hbh7m" event={"ID":"65eece87-4279-4d6c-b2a6-5841fd5b3298","Type":"ContainerDied","Data":"0f5251869335092c27ec478b7850c0b761a69b72c601c2774db88367eee9ef81"} Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.378182 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-574f89688c-hbh7m" event={"ID":"65eece87-4279-4d6c-b2a6-5841fd5b3298","Type":"ContainerDied","Data":"7dde166a73d5b76e2abad149fb208a207bbfb6c4d3f85860b86c5eececd4c718"} Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.378209 4742 scope.go:117] "RemoveContainer" containerID="c3a0400780874ea1469dfe2dbe4742008a4997fce008a566e72b6e1f3a0d759f" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.399614 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" path="/var/lib/kubelet/pods/20ae73b5-51f4-4bcf-ba9c-c35f566cd07e/volumes" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.401095 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" path="/var/lib/kubelet/pods/2f4c3ae5-d78c-4ddb-953d-cbee5b815be9/volumes" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.401888 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="439adfba-ae29-4db5-8a77-88eede9d0bd9" path="/var/lib/kubelet/pods/439adfba-ae29-4db5-8a77-88eede9d0bd9/volumes" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.410280 4742 scope.go:117] "RemoveContainer" containerID="0f5251869335092c27ec478b7850c0b761a69b72c601c2774db88367eee9ef81" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.418002 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-ovndb-tls-certs\") pod \"65eece87-4279-4d6c-b2a6-5841fd5b3298\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.418170 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-combined-ca-bundle\") pod \"65eece87-4279-4d6c-b2a6-5841fd5b3298\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.418205 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-httpd-config\") pod \"65eece87-4279-4d6c-b2a6-5841fd5b3298\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.418256 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-public-tls-certs\") pod 
\"65eece87-4279-4d6c-b2a6-5841fd5b3298\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.418296 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-config\") pod \"65eece87-4279-4d6c-b2a6-5841fd5b3298\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.418329 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qw9n4\" (UniqueName: \"kubernetes.io/projected/65eece87-4279-4d6c-b2a6-5841fd5b3298-kube-api-access-qw9n4\") pod \"65eece87-4279-4d6c-b2a6-5841fd5b3298\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.418356 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-internal-tls-certs\") pod \"65eece87-4279-4d6c-b2a6-5841fd5b3298\" (UID: \"65eece87-4279-4d6c-b2a6-5841fd5b3298\") " Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.423697 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "65eece87-4279-4d6c-b2a6-5841fd5b3298" (UID: "65eece87-4279-4d6c-b2a6-5841fd5b3298"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.423910 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65eece87-4279-4d6c-b2a6-5841fd5b3298-kube-api-access-qw9n4" (OuterVolumeSpecName: "kube-api-access-qw9n4") pod "65eece87-4279-4d6c-b2a6-5841fd5b3298" (UID: "65eece87-4279-4d6c-b2a6-5841fd5b3298"). InnerVolumeSpecName "kube-api-access-qw9n4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.432019 4742 scope.go:117] "RemoveContainer" containerID="c3a0400780874ea1469dfe2dbe4742008a4997fce008a566e72b6e1f3a0d759f" Dec 05 06:15:30 crc kubenswrapper[4742]: E1205 06:15:30.432657 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3a0400780874ea1469dfe2dbe4742008a4997fce008a566e72b6e1f3a0d759f\": container with ID starting with c3a0400780874ea1469dfe2dbe4742008a4997fce008a566e72b6e1f3a0d759f not found: ID does not exist" containerID="c3a0400780874ea1469dfe2dbe4742008a4997fce008a566e72b6e1f3a0d759f" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.432709 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3a0400780874ea1469dfe2dbe4742008a4997fce008a566e72b6e1f3a0d759f"} err="failed to get container status \"c3a0400780874ea1469dfe2dbe4742008a4997fce008a566e72b6e1f3a0d759f\": rpc error: code = NotFound desc = could not find container \"c3a0400780874ea1469dfe2dbe4742008a4997fce008a566e72b6e1f3a0d759f\": container with ID starting with c3a0400780874ea1469dfe2dbe4742008a4997fce008a566e72b6e1f3a0d759f not found: ID does not exist" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.432813 4742 scope.go:117] "RemoveContainer" containerID="0f5251869335092c27ec478b7850c0b761a69b72c601c2774db88367eee9ef81" Dec 05 06:15:30 crc kubenswrapper[4742]: E1205 06:15:30.433255 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f5251869335092c27ec478b7850c0b761a69b72c601c2774db88367eee9ef81\": container with ID starting with 0f5251869335092c27ec478b7850c0b761a69b72c601c2774db88367eee9ef81 not found: ID does not exist" containerID="0f5251869335092c27ec478b7850c0b761a69b72c601c2774db88367eee9ef81" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.433294 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f5251869335092c27ec478b7850c0b761a69b72c601c2774db88367eee9ef81"} err="failed to get container status \"0f5251869335092c27ec478b7850c0b761a69b72c601c2774db88367eee9ef81\": rpc error: code = NotFound desc = could not find container \"0f5251869335092c27ec478b7850c0b761a69b72c601c2774db88367eee9ef81\": container with ID starting with 0f5251869335092c27ec478b7850c0b761a69b72c601c2774db88367eee9ef81 not found: ID does not exist" Dec 05 06:15:30 crc kubenswrapper[4742]: E1205 06:15:30.446410 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 06:15:30 crc kubenswrapper[4742]: E1205 06:15:30.447081 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 06:15:30 crc kubenswrapper[4742]: E1205 06:15:30.447775 4742 
log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 06:15:30 crc kubenswrapper[4742]: E1205 06:15:30.447850 4742 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovsdb-server" Dec 05 06:15:30 crc kubenswrapper[4742]: E1205 06:15:30.448041 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 06:15:30 crc kubenswrapper[4742]: E1205 06:15:30.451268 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 06:15:30 crc kubenswrapper[4742]: E1205 06:15:30.453094 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 06:15:30 crc kubenswrapper[4742]: E1205 06:15:30.453164 4742 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovs-vswitchd" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.472502 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "65eece87-4279-4d6c-b2a6-5841fd5b3298" (UID: "65eece87-4279-4d6c-b2a6-5841fd5b3298"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.485458 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "65eece87-4279-4d6c-b2a6-5841fd5b3298" (UID: "65eece87-4279-4d6c-b2a6-5841fd5b3298"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.487774 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65eece87-4279-4d6c-b2a6-5841fd5b3298" (UID: "65eece87-4279-4d6c-b2a6-5841fd5b3298"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.490628 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-config" (OuterVolumeSpecName: "config") pod "65eece87-4279-4d6c-b2a6-5841fd5b3298" (UID: "65eece87-4279-4d6c-b2a6-5841fd5b3298"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.508551 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "65eece87-4279-4d6c-b2a6-5841fd5b3298" (UID: "65eece87-4279-4d6c-b2a6-5841fd5b3298"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.520542 4742 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.520565 4742 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.520576 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qw9n4\" (UniqueName: \"kubernetes.io/projected/65eece87-4279-4d6c-b2a6-5841fd5b3298-kube-api-access-qw9n4\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.520586 4742 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.520595 4742 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.520603 4742 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.520611 4742 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/65eece87-4279-4d6c-b2a6-5841fd5b3298-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.725343 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-574f89688c-hbh7m"] Dec 05 06:15:30 crc kubenswrapper[4742]: I1205 06:15:30.736590 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-574f89688c-hbh7m"] Dec 05 
Dec 05 06:15:35 crc kubenswrapper[4742]: E1205 06:15:35.442584 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Dec 05 06:15:35 crc kubenswrapper[4742]: E1205 06:15:35.443711 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Dec 05 06:15:35 crc kubenswrapper[4742]: E1205 06:15:35.444675 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Dec 05 06:15:35 crc kubenswrapper[4742]: E1205 06:15:35.444826 4742 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovsdb-server"
Dec 05 06:15:35 crc kubenswrapper[4742]: E1205 06:15:35.445838 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Dec 05 06:15:35 crc kubenswrapper[4742]: E1205 06:15:35.453746 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Dec 05 06:15:35 crc kubenswrapper[4742]: E1205 06:15:35.455633 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Dec 05 06:15:35 crc kubenswrapper[4742]: E1205 06:15:35.455704 4742 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovs-vswitchd"
Dec 05 06:15:40 crc kubenswrapper[4742]: E1205 06:15:40.443458 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Dec 05 06:15:40 crc kubenswrapper[4742]: E1205 06:15:40.444504 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Dec 05 06:15:40 crc kubenswrapper[4742]: E1205 06:15:40.444993 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Dec 05 06:15:40 crc kubenswrapper[4742]: E1205 06:15:40.445038 4742 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovsdb-server"
Dec 05 06:15:40 crc kubenswrapper[4742]: E1205 06:15:40.445388 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Dec 05 06:15:40 crc kubenswrapper[4742]: E1205 06:15:40.447207 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Dec 05 06:15:40 crc kubenswrapper[4742]: E1205 06:15:40.448909 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Dec 05 06:15:40 crc kubenswrapper[4742]: E1205 06:15:40.448979 4742 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovs-vswitchd"
podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovs-vswitchd" Dec 05 06:15:45 crc kubenswrapper[4742]: E1205 06:15:45.444696 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 06:15:45 crc kubenswrapper[4742]: E1205 06:15:45.445544 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 06:15:45 crc kubenswrapper[4742]: E1205 06:15:45.445847 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 06:15:45 crc kubenswrapper[4742]: E1205 06:15:45.446121 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Dec 05 06:15:45 crc kubenswrapper[4742]: E1205 06:15:45.446189 4742 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovsdb-server" Dec 05 06:15:45 crc kubenswrapper[4742]: E1205 06:15:45.448070 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 06:15:45 crc kubenswrapper[4742]: E1205 06:15:45.450978 4742 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Dec 05 06:15:45 crc kubenswrapper[4742]: E1205 06:15:45.451075 4742 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-tgnp6" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovs-vswitchd" Dec 05 06:15:47 crc 
kubenswrapper[4742]: I1205 06:15:47.570791 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-tgnp6_504b6b10-062b-4d3c-8202-fcfd97bc57aa/ovs-vswitchd/0.log" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.572952 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.586877 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-tgnp6_504b6b10-062b-4d3c-8202-fcfd97bc57aa/ovs-vswitchd/0.log" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.587876 4742 generic.go:334] "Generic (PLEG): container finished" podID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" exitCode=137 Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.587909 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-tgnp6" event={"ID":"504b6b10-062b-4d3c-8202-fcfd97bc57aa","Type":"ContainerDied","Data":"2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc"} Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.587948 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-tgnp6" event={"ID":"504b6b10-062b-4d3c-8202-fcfd97bc57aa","Type":"ContainerDied","Data":"57559d840c8bf91e31b5c51ae2cd65e01b1158303c7d096915810505d014966a"} Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.587970 4742 scope.go:117] "RemoveContainer" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.587984 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-tgnp6" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.616322 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/504b6b10-062b-4d3c-8202-fcfd97bc57aa-scripts\") pod \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.616392 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-run\") pod \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.616441 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-etc-ovs\") pod \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.616483 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-lib\") pod \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.616537 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-log\") pod \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " Dec 05 
06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.616564 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxmhq\" (UniqueName: \"kubernetes.io/projected/504b6b10-062b-4d3c-8202-fcfd97bc57aa-kube-api-access-xxmhq\") pod \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\" (UID: \"504b6b10-062b-4d3c-8202-fcfd97bc57aa\") " Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.617082 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "504b6b10-062b-4d3c-8202-fcfd97bc57aa" (UID: "504b6b10-062b-4d3c-8202-fcfd97bc57aa"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.617201 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-run" (OuterVolumeSpecName: "var-run") pod "504b6b10-062b-4d3c-8202-fcfd97bc57aa" (UID: "504b6b10-062b-4d3c-8202-fcfd97bc57aa"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.617260 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-lib" (OuterVolumeSpecName: "var-lib") pod "504b6b10-062b-4d3c-8202-fcfd97bc57aa" (UID: "504b6b10-062b-4d3c-8202-fcfd97bc57aa"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.617316 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-log" (OuterVolumeSpecName: "var-log") pod "504b6b10-062b-4d3c-8202-fcfd97bc57aa" (UID: "504b6b10-062b-4d3c-8202-fcfd97bc57aa"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.619779 4742 scope.go:117] "RemoveContainer" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.620364 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/504b6b10-062b-4d3c-8202-fcfd97bc57aa-scripts" (OuterVolumeSpecName: "scripts") pod "504b6b10-062b-4d3c-8202-fcfd97bc57aa" (UID: "504b6b10-062b-4d3c-8202-fcfd97bc57aa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.624491 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/504b6b10-062b-4d3c-8202-fcfd97bc57aa-kube-api-access-xxmhq" (OuterVolumeSpecName: "kube-api-access-xxmhq") pod "504b6b10-062b-4d3c-8202-fcfd97bc57aa" (UID: "504b6b10-062b-4d3c-8202-fcfd97bc57aa"). InnerVolumeSpecName "kube-api-access-xxmhq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.671607 4742 scope.go:117] "RemoveContainer" containerID="652a22f9d708ac555b3ac5eb928fa8fe7dbbb5c68552672921a0164adc81ce23" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.706675 4742 scope.go:117] "RemoveContainer" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" Dec 05 06:15:47 crc kubenswrapper[4742]: E1205 06:15:47.707217 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc\": container with ID starting with 2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc not found: ID does not exist" containerID="2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.707454 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc"} err="failed to get container status \"2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc\": rpc error: code = NotFound desc = could not find container \"2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc\": container with ID starting with 2fe19f91420a01210a21d4371d649e6ddfbc92c7b68947c6b76d971d5d43a7cc not found: ID does not exist" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.707510 4742 scope.go:117] "RemoveContainer" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" Dec 05 06:15:47 crc kubenswrapper[4742]: E1205 06:15:47.708091 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8\": container with ID starting with 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 not found: ID does not exist" containerID="56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.708122 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8"} err="failed to get container status \"56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8\": rpc error: code = NotFound desc = could not find container \"56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8\": container with ID starting with 56f35e7ebe54c907c70849759e7d8b29133e5ef246b88ed3b691db524bbe17e8 not found: ID does not exist" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.708139 4742 scope.go:117] "RemoveContainer" containerID="652a22f9d708ac555b3ac5eb928fa8fe7dbbb5c68552672921a0164adc81ce23" Dec 05 06:15:47 crc kubenswrapper[4742]: E1205 06:15:47.708465 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"652a22f9d708ac555b3ac5eb928fa8fe7dbbb5c68552672921a0164adc81ce23\": container with ID starting with 652a22f9d708ac555b3ac5eb928fa8fe7dbbb5c68552672921a0164adc81ce23 not found: ID does not exist" containerID="652a22f9d708ac555b3ac5eb928fa8fe7dbbb5c68552672921a0164adc81ce23" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.708488 4742 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"652a22f9d708ac555b3ac5eb928fa8fe7dbbb5c68552672921a0164adc81ce23"} err="failed to get container status \"652a22f9d708ac555b3ac5eb928fa8fe7dbbb5c68552672921a0164adc81ce23\": rpc error: code = NotFound desc = could not find container \"652a22f9d708ac555b3ac5eb928fa8fe7dbbb5c68552672921a0164adc81ce23\": container with ID starting with 652a22f9d708ac555b3ac5eb928fa8fe7dbbb5c68552672921a0164adc81ce23 not found: ID does not exist" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.718339 4742 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.718405 4742 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-etc-ovs\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.718418 4742 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-lib\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.718432 4742 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/504b6b10-062b-4d3c-8202-fcfd97bc57aa-var-log\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.718445 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxmhq\" (UniqueName: \"kubernetes.io/projected/504b6b10-062b-4d3c-8202-fcfd97bc57aa-kube-api-access-xxmhq\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.718473 4742 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/504b6b10-062b-4d3c-8202-fcfd97bc57aa-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.929207 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-tgnp6"] Dec 05 06:15:47 crc kubenswrapper[4742]: I1205 06:15:47.937558 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-tgnp6"] Dec 05 06:15:48 crc kubenswrapper[4742]: I1205 06:15:48.397908 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" path="/var/lib/kubelet/pods/504b6b10-062b-4d3c-8202-fcfd97bc57aa/volumes" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.084927 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.142034 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.142189 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vspzr\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-kube-api-access-vspzr\") pod \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.142264 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-lock\") pod \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.142347 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift\") pod \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.142420 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-cache\") pod \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\" (UID: \"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b\") " Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.143517 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-cache" (OuterVolumeSpecName: "cache") pod "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" (UID: "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.144256 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-lock" (OuterVolumeSpecName: "lock") pod "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" (UID: "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.147403 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" (UID: "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.148350 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "swift") pod "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" (UID: "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b"). InnerVolumeSpecName "local-storage12-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.150950 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-kube-api-access-vspzr" (OuterVolumeSpecName: "kube-api-access-vspzr") pod "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" (UID: "f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b"). InnerVolumeSpecName "kube-api-access-vspzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.243889 4742 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.243926 4742 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-cache\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.243994 4742 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.244010 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vspzr\" (UniqueName: \"kubernetes.io/projected/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-kube-api-access-vspzr\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.244024 4742 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b-lock\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.258597 4742 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.345986 4742 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.621776 4742 generic.go:334] "Generic (PLEG): container finished" podID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerID="23080c098e9241c0fbef2d16834563f5be32e05ed6a93235162276519247c330" exitCode=137 Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.621844 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"23080c098e9241c0fbef2d16834563f5be32e05ed6a93235162276519247c330"} Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.621883 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b","Type":"ContainerDied","Data":"b6d1619bc68a6a3e38a75cf2569fbb323e55a9bf3056bb602d712f91b78b7a22"} Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.621913 4742 scope.go:117] "RemoveContainer" containerID="23080c098e9241c0fbef2d16834563f5be32e05ed6a93235162276519247c330" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.622112 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.654834 4742 scope.go:117] "RemoveContainer" containerID="402a519ada8012b4c837384aa46b5de9d5a53090de81a4bfd5fca5e66afd80ab" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.676852 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.682759 4742 scope.go:117] "RemoveContainer" containerID="83482a05302b7d016da2098260530a40bbabcc0dd30bbee8e001d56649d1fa10" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.684117 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.708324 4742 scope.go:117] "RemoveContainer" containerID="583bc9aead517370de5511bc87f2ce12fd00f5d749568164aaf7dd9550bed55d" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.734898 4742 scope.go:117] "RemoveContainer" containerID="c8e35cc4fdc899da4c083432abcbef5e1ad92a9b95d3984cab5952bb33e1b375" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.756097 4742 scope.go:117] "RemoveContainer" containerID="76a0ea55014e165c32a9608cf728f086e8082a97b97de1dd0fdb8cb27caa0a7e" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.785525 4742 scope.go:117] "RemoveContainer" containerID="89f787d3dcbe6d0e4ec54aa9195f1fe45b50844797a04c3b326966f17201a671" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.809176 4742 scope.go:117] "RemoveContainer" containerID="10dd23759afdac3e2bd7b9f5ad1e8df111f57ac5da85f46f8da24ff04f9269b3" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.836275 4742 scope.go:117] "RemoveContainer" containerID="a0061dd4be94377433ec372b482fc15e1700f814f12276d54dcb9692d64d5aab" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.868125 4742 scope.go:117] "RemoveContainer" containerID="0eaa57a7d1edb43a21fc4813afcd8bd8362171ef4a64691cceb98492dc6baccb" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.892999 4742 scope.go:117] "RemoveContainer" containerID="9b48fcbbeeb0dfa6813285a8982e885fd781741008cdbdef2351e5277caa44d7" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.915651 4742 scope.go:117] "RemoveContainer" containerID="f326ad43ebacf85ccc332a8b35e3c1fcca45c28c2a4b229df2457a4983e5ac64" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.934233 4742 scope.go:117] "RemoveContainer" containerID="a572a48ba1392fc5a8267bbf98db108fc184bd4381327a56c58c1ab6e32e931f" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.954686 4742 scope.go:117] "RemoveContainer" containerID="c5fb081f824bef8d38c8af954df8461ee3d95ecdcf24abe5f14ab12b9b79eaaf" Dec 05 06:15:49 crc kubenswrapper[4742]: I1205 06:15:49.990009 4742 scope.go:117] "RemoveContainer" containerID="662fdbccb819aa757a5eacbc682c4c9d90ae7096d453acac04fbb8d2c2d724e9" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.012489 4742 scope.go:117] "RemoveContainer" containerID="23080c098e9241c0fbef2d16834563f5be32e05ed6a93235162276519247c330" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.013173 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23080c098e9241c0fbef2d16834563f5be32e05ed6a93235162276519247c330\": container with ID starting with 23080c098e9241c0fbef2d16834563f5be32e05ed6a93235162276519247c330 not found: ID does not exist" containerID="23080c098e9241c0fbef2d16834563f5be32e05ed6a93235162276519247c330" Dec 05 06:15:50 crc kubenswrapper[4742]: 
I1205 06:15:50.013612 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23080c098e9241c0fbef2d16834563f5be32e05ed6a93235162276519247c330"} err="failed to get container status \"23080c098e9241c0fbef2d16834563f5be32e05ed6a93235162276519247c330\": rpc error: code = NotFound desc = could not find container \"23080c098e9241c0fbef2d16834563f5be32e05ed6a93235162276519247c330\": container with ID starting with 23080c098e9241c0fbef2d16834563f5be32e05ed6a93235162276519247c330 not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.013658 4742 scope.go:117] "RemoveContainer" containerID="402a519ada8012b4c837384aa46b5de9d5a53090de81a4bfd5fca5e66afd80ab" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.014203 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"402a519ada8012b4c837384aa46b5de9d5a53090de81a4bfd5fca5e66afd80ab\": container with ID starting with 402a519ada8012b4c837384aa46b5de9d5a53090de81a4bfd5fca5e66afd80ab not found: ID does not exist" containerID="402a519ada8012b4c837384aa46b5de9d5a53090de81a4bfd5fca5e66afd80ab" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.014265 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"402a519ada8012b4c837384aa46b5de9d5a53090de81a4bfd5fca5e66afd80ab"} err="failed to get container status \"402a519ada8012b4c837384aa46b5de9d5a53090de81a4bfd5fca5e66afd80ab\": rpc error: code = NotFound desc = could not find container \"402a519ada8012b4c837384aa46b5de9d5a53090de81a4bfd5fca5e66afd80ab\": container with ID starting with 402a519ada8012b4c837384aa46b5de9d5a53090de81a4bfd5fca5e66afd80ab not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.014298 4742 scope.go:117] "RemoveContainer" containerID="83482a05302b7d016da2098260530a40bbabcc0dd30bbee8e001d56649d1fa10" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.014856 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83482a05302b7d016da2098260530a40bbabcc0dd30bbee8e001d56649d1fa10\": container with ID starting with 83482a05302b7d016da2098260530a40bbabcc0dd30bbee8e001d56649d1fa10 not found: ID does not exist" containerID="83482a05302b7d016da2098260530a40bbabcc0dd30bbee8e001d56649d1fa10" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.014892 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83482a05302b7d016da2098260530a40bbabcc0dd30bbee8e001d56649d1fa10"} err="failed to get container status \"83482a05302b7d016da2098260530a40bbabcc0dd30bbee8e001d56649d1fa10\": rpc error: code = NotFound desc = could not find container \"83482a05302b7d016da2098260530a40bbabcc0dd30bbee8e001d56649d1fa10\": container with ID starting with 83482a05302b7d016da2098260530a40bbabcc0dd30bbee8e001d56649d1fa10 not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.014925 4742 scope.go:117] "RemoveContainer" containerID="583bc9aead517370de5511bc87f2ce12fd00f5d749568164aaf7dd9550bed55d" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.015293 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"583bc9aead517370de5511bc87f2ce12fd00f5d749568164aaf7dd9550bed55d\": container with ID starting with 
583bc9aead517370de5511bc87f2ce12fd00f5d749568164aaf7dd9550bed55d not found: ID does not exist" containerID="583bc9aead517370de5511bc87f2ce12fd00f5d749568164aaf7dd9550bed55d" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.015334 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"583bc9aead517370de5511bc87f2ce12fd00f5d749568164aaf7dd9550bed55d"} err="failed to get container status \"583bc9aead517370de5511bc87f2ce12fd00f5d749568164aaf7dd9550bed55d\": rpc error: code = NotFound desc = could not find container \"583bc9aead517370de5511bc87f2ce12fd00f5d749568164aaf7dd9550bed55d\": container with ID starting with 583bc9aead517370de5511bc87f2ce12fd00f5d749568164aaf7dd9550bed55d not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.015362 4742 scope.go:117] "RemoveContainer" containerID="c8e35cc4fdc899da4c083432abcbef5e1ad92a9b95d3984cab5952bb33e1b375" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.015670 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8e35cc4fdc899da4c083432abcbef5e1ad92a9b95d3984cab5952bb33e1b375\": container with ID starting with c8e35cc4fdc899da4c083432abcbef5e1ad92a9b95d3984cab5952bb33e1b375 not found: ID does not exist" containerID="c8e35cc4fdc899da4c083432abcbef5e1ad92a9b95d3984cab5952bb33e1b375" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.015722 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8e35cc4fdc899da4c083432abcbef5e1ad92a9b95d3984cab5952bb33e1b375"} err="failed to get container status \"c8e35cc4fdc899da4c083432abcbef5e1ad92a9b95d3984cab5952bb33e1b375\": rpc error: code = NotFound desc = could not find container \"c8e35cc4fdc899da4c083432abcbef5e1ad92a9b95d3984cab5952bb33e1b375\": container with ID starting with c8e35cc4fdc899da4c083432abcbef5e1ad92a9b95d3984cab5952bb33e1b375 not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.015757 4742 scope.go:117] "RemoveContainer" containerID="76a0ea55014e165c32a9608cf728f086e8082a97b97de1dd0fdb8cb27caa0a7e" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.016003 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76a0ea55014e165c32a9608cf728f086e8082a97b97de1dd0fdb8cb27caa0a7e\": container with ID starting with 76a0ea55014e165c32a9608cf728f086e8082a97b97de1dd0fdb8cb27caa0a7e not found: ID does not exist" containerID="76a0ea55014e165c32a9608cf728f086e8082a97b97de1dd0fdb8cb27caa0a7e" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.016039 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76a0ea55014e165c32a9608cf728f086e8082a97b97de1dd0fdb8cb27caa0a7e"} err="failed to get container status \"76a0ea55014e165c32a9608cf728f086e8082a97b97de1dd0fdb8cb27caa0a7e\": rpc error: code = NotFound desc = could not find container \"76a0ea55014e165c32a9608cf728f086e8082a97b97de1dd0fdb8cb27caa0a7e\": container with ID starting with 76a0ea55014e165c32a9608cf728f086e8082a97b97de1dd0fdb8cb27caa0a7e not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.016082 4742 scope.go:117] "RemoveContainer" containerID="89f787d3dcbe6d0e4ec54aa9195f1fe45b50844797a04c3b326966f17201a671" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.016391 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"89f787d3dcbe6d0e4ec54aa9195f1fe45b50844797a04c3b326966f17201a671\": container with ID starting with 89f787d3dcbe6d0e4ec54aa9195f1fe45b50844797a04c3b326966f17201a671 not found: ID does not exist" containerID="89f787d3dcbe6d0e4ec54aa9195f1fe45b50844797a04c3b326966f17201a671" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.016427 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89f787d3dcbe6d0e4ec54aa9195f1fe45b50844797a04c3b326966f17201a671"} err="failed to get container status \"89f787d3dcbe6d0e4ec54aa9195f1fe45b50844797a04c3b326966f17201a671\": rpc error: code = NotFound desc = could not find container \"89f787d3dcbe6d0e4ec54aa9195f1fe45b50844797a04c3b326966f17201a671\": container with ID starting with 89f787d3dcbe6d0e4ec54aa9195f1fe45b50844797a04c3b326966f17201a671 not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.016449 4742 scope.go:117] "RemoveContainer" containerID="10dd23759afdac3e2bd7b9f5ad1e8df111f57ac5da85f46f8da24ff04f9269b3" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.016760 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10dd23759afdac3e2bd7b9f5ad1e8df111f57ac5da85f46f8da24ff04f9269b3\": container with ID starting with 10dd23759afdac3e2bd7b9f5ad1e8df111f57ac5da85f46f8da24ff04f9269b3 not found: ID does not exist" containerID="10dd23759afdac3e2bd7b9f5ad1e8df111f57ac5da85f46f8da24ff04f9269b3" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.016831 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10dd23759afdac3e2bd7b9f5ad1e8df111f57ac5da85f46f8da24ff04f9269b3"} err="failed to get container status \"10dd23759afdac3e2bd7b9f5ad1e8df111f57ac5da85f46f8da24ff04f9269b3\": rpc error: code = NotFound desc = could not find container \"10dd23759afdac3e2bd7b9f5ad1e8df111f57ac5da85f46f8da24ff04f9269b3\": container with ID starting with 10dd23759afdac3e2bd7b9f5ad1e8df111f57ac5da85f46f8da24ff04f9269b3 not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.016873 4742 scope.go:117] "RemoveContainer" containerID="a0061dd4be94377433ec372b482fc15e1700f814f12276d54dcb9692d64d5aab" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.017636 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0061dd4be94377433ec372b482fc15e1700f814f12276d54dcb9692d64d5aab\": container with ID starting with a0061dd4be94377433ec372b482fc15e1700f814f12276d54dcb9692d64d5aab not found: ID does not exist" containerID="a0061dd4be94377433ec372b482fc15e1700f814f12276d54dcb9692d64d5aab" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.017679 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0061dd4be94377433ec372b482fc15e1700f814f12276d54dcb9692d64d5aab"} err="failed to get container status \"a0061dd4be94377433ec372b482fc15e1700f814f12276d54dcb9692d64d5aab\": rpc error: code = NotFound desc = could not find container \"a0061dd4be94377433ec372b482fc15e1700f814f12276d54dcb9692d64d5aab\": container with ID starting with a0061dd4be94377433ec372b482fc15e1700f814f12276d54dcb9692d64d5aab not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.017706 4742 scope.go:117] "RemoveContainer" 
containerID="0eaa57a7d1edb43a21fc4813afcd8bd8362171ef4a64691cceb98492dc6baccb" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.018454 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0eaa57a7d1edb43a21fc4813afcd8bd8362171ef4a64691cceb98492dc6baccb\": container with ID starting with 0eaa57a7d1edb43a21fc4813afcd8bd8362171ef4a64691cceb98492dc6baccb not found: ID does not exist" containerID="0eaa57a7d1edb43a21fc4813afcd8bd8362171ef4a64691cceb98492dc6baccb" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.018527 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0eaa57a7d1edb43a21fc4813afcd8bd8362171ef4a64691cceb98492dc6baccb"} err="failed to get container status \"0eaa57a7d1edb43a21fc4813afcd8bd8362171ef4a64691cceb98492dc6baccb\": rpc error: code = NotFound desc = could not find container \"0eaa57a7d1edb43a21fc4813afcd8bd8362171ef4a64691cceb98492dc6baccb\": container with ID starting with 0eaa57a7d1edb43a21fc4813afcd8bd8362171ef4a64691cceb98492dc6baccb not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.018572 4742 scope.go:117] "RemoveContainer" containerID="9b48fcbbeeb0dfa6813285a8982e885fd781741008cdbdef2351e5277caa44d7" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.019347 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b48fcbbeeb0dfa6813285a8982e885fd781741008cdbdef2351e5277caa44d7\": container with ID starting with 9b48fcbbeeb0dfa6813285a8982e885fd781741008cdbdef2351e5277caa44d7 not found: ID does not exist" containerID="9b48fcbbeeb0dfa6813285a8982e885fd781741008cdbdef2351e5277caa44d7" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.019442 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b48fcbbeeb0dfa6813285a8982e885fd781741008cdbdef2351e5277caa44d7"} err="failed to get container status \"9b48fcbbeeb0dfa6813285a8982e885fd781741008cdbdef2351e5277caa44d7\": rpc error: code = NotFound desc = could not find container \"9b48fcbbeeb0dfa6813285a8982e885fd781741008cdbdef2351e5277caa44d7\": container with ID starting with 9b48fcbbeeb0dfa6813285a8982e885fd781741008cdbdef2351e5277caa44d7 not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.019500 4742 scope.go:117] "RemoveContainer" containerID="f326ad43ebacf85ccc332a8b35e3c1fcca45c28c2a4b229df2457a4983e5ac64" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.019983 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f326ad43ebacf85ccc332a8b35e3c1fcca45c28c2a4b229df2457a4983e5ac64\": container with ID starting with f326ad43ebacf85ccc332a8b35e3c1fcca45c28c2a4b229df2457a4983e5ac64 not found: ID does not exist" containerID="f326ad43ebacf85ccc332a8b35e3c1fcca45c28c2a4b229df2457a4983e5ac64" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.020026 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f326ad43ebacf85ccc332a8b35e3c1fcca45c28c2a4b229df2457a4983e5ac64"} err="failed to get container status \"f326ad43ebacf85ccc332a8b35e3c1fcca45c28c2a4b229df2457a4983e5ac64\": rpc error: code = NotFound desc = could not find container \"f326ad43ebacf85ccc332a8b35e3c1fcca45c28c2a4b229df2457a4983e5ac64\": container with ID starting with 
f326ad43ebacf85ccc332a8b35e3c1fcca45c28c2a4b229df2457a4983e5ac64 not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.020081 4742 scope.go:117] "RemoveContainer" containerID="a572a48ba1392fc5a8267bbf98db108fc184bd4381327a56c58c1ab6e32e931f" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.020487 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a572a48ba1392fc5a8267bbf98db108fc184bd4381327a56c58c1ab6e32e931f\": container with ID starting with a572a48ba1392fc5a8267bbf98db108fc184bd4381327a56c58c1ab6e32e931f not found: ID does not exist" containerID="a572a48ba1392fc5a8267bbf98db108fc184bd4381327a56c58c1ab6e32e931f" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.020524 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a572a48ba1392fc5a8267bbf98db108fc184bd4381327a56c58c1ab6e32e931f"} err="failed to get container status \"a572a48ba1392fc5a8267bbf98db108fc184bd4381327a56c58c1ab6e32e931f\": rpc error: code = NotFound desc = could not find container \"a572a48ba1392fc5a8267bbf98db108fc184bd4381327a56c58c1ab6e32e931f\": container with ID starting with a572a48ba1392fc5a8267bbf98db108fc184bd4381327a56c58c1ab6e32e931f not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.020547 4742 scope.go:117] "RemoveContainer" containerID="c5fb081f824bef8d38c8af954df8461ee3d95ecdcf24abe5f14ab12b9b79eaaf" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.021112 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5fb081f824bef8d38c8af954df8461ee3d95ecdcf24abe5f14ab12b9b79eaaf\": container with ID starting with c5fb081f824bef8d38c8af954df8461ee3d95ecdcf24abe5f14ab12b9b79eaaf not found: ID does not exist" containerID="c5fb081f824bef8d38c8af954df8461ee3d95ecdcf24abe5f14ab12b9b79eaaf" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.021174 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5fb081f824bef8d38c8af954df8461ee3d95ecdcf24abe5f14ab12b9b79eaaf"} err="failed to get container status \"c5fb081f824bef8d38c8af954df8461ee3d95ecdcf24abe5f14ab12b9b79eaaf\": rpc error: code = NotFound desc = could not find container \"c5fb081f824bef8d38c8af954df8461ee3d95ecdcf24abe5f14ab12b9b79eaaf\": container with ID starting with c5fb081f824bef8d38c8af954df8461ee3d95ecdcf24abe5f14ab12b9b79eaaf not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.021223 4742 scope.go:117] "RemoveContainer" containerID="662fdbccb819aa757a5eacbc682c4c9d90ae7096d453acac04fbb8d2c2d724e9" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.021661 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"662fdbccb819aa757a5eacbc682c4c9d90ae7096d453acac04fbb8d2c2d724e9\": container with ID starting with 662fdbccb819aa757a5eacbc682c4c9d90ae7096d453acac04fbb8d2c2d724e9 not found: ID does not exist" containerID="662fdbccb819aa757a5eacbc682c4c9d90ae7096d453acac04fbb8d2c2d724e9" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.021692 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"662fdbccb819aa757a5eacbc682c4c9d90ae7096d453acac04fbb8d2c2d724e9"} err="failed to get container status \"662fdbccb819aa757a5eacbc682c4c9d90ae7096d453acac04fbb8d2c2d724e9\": rpc 
error: code = NotFound desc = could not find container \"662fdbccb819aa757a5eacbc682c4c9d90ae7096d453acac04fbb8d2c2d724e9\": container with ID starting with 662fdbccb819aa757a5eacbc682c4c9d90ae7096d453acac04fbb8d2c2d724e9 not found: ID does not exist" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.398935 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" path="/var/lib/kubelet/pods/f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b/volumes" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.933498 4742 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podb5df8784-b63d-41b7-a542-dcf53ea6cc5e"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podb5df8784-b63d-41b7-a542-dcf53ea6cc5e] : Timed out while waiting for systemd to remove kubepods-besteffort-podb5df8784_b63d_41b7_a542_dcf53ea6cc5e.slice" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.933615 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort podb5df8784-b63d-41b7-a542-dcf53ea6cc5e] : unable to destroy cgroup paths for cgroup [kubepods besteffort podb5df8784-b63d-41b7-a542-dcf53ea6cc5e] : Timed out while waiting for systemd to remove kubepods-besteffort-podb5df8784_b63d_41b7_a542_dcf53ea6cc5e.slice" pod="openstack/ovn-controller-9n84z" podUID="b5df8784-b63d-41b7-a542-dcf53ea6cc5e" Dec 05 06:15:50 crc kubenswrapper[4742]: I1205 06:15:50.999806 4742 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podb8e993d8-0221-4214-b00a-ca745e716bbe"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podb8e993d8-0221-4214-b00a-ca745e716bbe] : Timed out while waiting for systemd to remove kubepods-besteffort-podb8e993d8_0221_4214_b00a_ca745e716bbe.slice" Dec 05 06:15:50 crc kubenswrapper[4742]: E1205 06:15:50.999897 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort podb8e993d8-0221-4214-b00a-ca745e716bbe] : unable to destroy cgroup paths for cgroup [kubepods besteffort podb8e993d8-0221-4214-b00a-ca745e716bbe] : Timed out while waiting for systemd to remove kubepods-besteffort-podb8e993d8_0221_4214_b00a_ca745e716bbe.slice" pod="openstack/ovsdbserver-sb-0" podUID="b8e993d8-0221-4214-b00a-ca745e716bbe" Dec 05 06:15:51 crc kubenswrapper[4742]: I1205 06:15:51.645439 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 06:15:51 crc kubenswrapper[4742]: I1205 06:15:51.645485 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-9n84z" Dec 05 06:15:51 crc kubenswrapper[4742]: I1205 06:15:51.678584 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-9n84z"] Dec 05 06:15:51 crc kubenswrapper[4742]: I1205 06:15:51.698309 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-9n84z"] Dec 05 06:15:51 crc kubenswrapper[4742]: I1205 06:15:51.708689 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 06:15:51 crc kubenswrapper[4742]: I1205 06:15:51.718612 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 06:15:52 crc kubenswrapper[4742]: I1205 06:15:52.396633 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5df8784-b63d-41b7-a542-dcf53ea6cc5e" path="/var/lib/kubelet/pods/b5df8784-b63d-41b7-a542-dcf53ea6cc5e/volumes" Dec 05 06:15:52 crc kubenswrapper[4742]: I1205 06:15:52.399032 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8e993d8-0221-4214-b00a-ca745e716bbe" path="/var/lib/kubelet/pods/b8e993d8-0221-4214-b00a-ca745e716bbe/volumes" Dec 05 06:15:53 crc kubenswrapper[4742]: I1205 06:15:53.466219 4742 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod931816fd-7570-46ac-b555-368b196b030c"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod931816fd-7570-46ac-b555-368b196b030c] : Timed out while waiting for systemd to remove kubepods-besteffort-pod931816fd_7570_46ac_b555_368b196b030c.slice" Dec 05 06:15:53 crc kubenswrapper[4742]: E1205 06:15:53.466685 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod931816fd-7570-46ac-b555-368b196b030c] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod931816fd-7570-46ac-b555-368b196b030c] : Timed out while waiting for systemd to remove kubepods-besteffort-pod931816fd_7570_46ac_b555_368b196b030c.slice" pod="openstack/nova-scheduler-0" podUID="931816fd-7570-46ac-b555-368b196b030c" Dec 05 06:15:53 crc kubenswrapper[4742]: I1205 06:15:53.676927 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 06:15:53 crc kubenswrapper[4742]: I1205 06:15:53.705276 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:15:53 crc kubenswrapper[4742]: I1205 06:15:53.712300 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 06:15:54 crc kubenswrapper[4742]: I1205 06:15:54.401849 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="931816fd-7570-46ac-b555-368b196b030c" path="/var/lib/kubelet/pods/931816fd-7570-46ac-b555-368b196b030c/volumes" Dec 05 06:17:09 crc kubenswrapper[4742]: I1205 06:17:09.923841 4742 scope.go:117] "RemoveContainer" containerID="3cc80d22c7502d26367a5895602d250fbad727be72075cf30657b1b606d9c71d" Dec 05 06:17:09 crc kubenswrapper[4742]: I1205 06:17:09.958598 4742 scope.go:117] "RemoveContainer" containerID="24fccc91d8c6dd1906a753332966ef0cd717400a1b2282ac6d7bf3319bb0cf20" Dec 05 06:17:10 crc kubenswrapper[4742]: I1205 06:17:10.010085 4742 scope.go:117] "RemoveContainer" containerID="36db8da42759bb13adccb6c0c7096e6756878db45e78c24b62c01ebefc0b14b1" Dec 05 06:17:10 crc kubenswrapper[4742]: I1205 06:17:10.069779 4742 scope.go:117] "RemoveContainer" containerID="2fb3977eb90709f749280ec6d06d71a5471a123464a4381ce4aa664c99c814b2" Dec 05 06:17:10 crc kubenswrapper[4742]: I1205 06:17:10.094772 4742 scope.go:117] "RemoveContainer" containerID="e1dd1f0e3402faa64ceb85997c4cfb6e94854d0472d6100b68ff61ab2b9908a5" Dec 05 06:17:10 crc kubenswrapper[4742]: I1205 06:17:10.138003 4742 scope.go:117] "RemoveContainer" containerID="1d882621240ed0d1f7dc3e986f4b8a98dd8af495e6e2a4a66c919c647194c445" Dec 05 06:17:10 crc kubenswrapper[4742]: I1205 06:17:10.165238 4742 scope.go:117] "RemoveContainer" containerID="910e17e6a6232277e245b4de61db21d9407700084a851b426cc8c12da12d003b" Dec 05 06:17:10 crc kubenswrapper[4742]: I1205 06:17:10.199110 4742 scope.go:117] "RemoveContainer" containerID="148e03b7513c06d95dcc0aa7eee3cfaf868afafc063c6ea1ceacb1994d13b688" Dec 05 06:17:10 crc kubenswrapper[4742]: I1205 06:17:10.223141 4742 scope.go:117] "RemoveContainer" containerID="5eb38408e54a19b9894ac41a44e186c12072601993fd24a9ae1debe6066e29b3" Dec 05 06:17:10 crc kubenswrapper[4742]: I1205 06:17:10.265764 4742 scope.go:117] "RemoveContainer" containerID="ead6e2c286b745cccfdd700f740be2071abf1e0e57f79f722943c9e07f78cbe0" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.277440 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-k279x"] Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278327 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65eece87-4279-4d6c-b2a6-5841fd5b3298" containerName="neutron-api" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278341 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="65eece87-4279-4d6c-b2a6-5841fd5b3298" containerName="neutron-api" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278352 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="ceilometer-notification-agent" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278358 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="ceilometer-notification-agent" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278365 4742 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovsdb-server" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278371 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovsdb-server" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278377 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-replicator" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278382 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-replicator" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278389 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65eece87-4279-4d6c-b2a6-5841fd5b3298" containerName="neutron-httpd" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278394 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="65eece87-4279-4d6c-b2a6-5841fd5b3298" containerName="neutron-httpd" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278406 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" containerName="nova-metadata-metadata" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278412 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" containerName="nova-metadata-metadata" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278419 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7d76df0-4f21-4729-9729-1f2ff54a8332" containerName="ovn-northd" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278424 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7d76df0-4f21-4729-9729-1f2ff54a8332" containerName="ovn-northd" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278431 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" containerName="rabbitmq" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278437 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" containerName="rabbitmq" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278446 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61c4b9e1-5266-49eb-8348-3b1034562185" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278451 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="61c4b9e1-5266-49eb-8348-3b1034562185" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278459 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa702931-d853-4f8b-b0d8-58f5476bb7c2" containerName="nova-api-api" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278465 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa702931-d853-4f8b-b0d8-58f5476bb7c2" containerName="nova-api-api" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278472 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9974486-076d-4493-af32-a08eef334572" containerName="barbican-api" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278479 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9974486-076d-4493-af32-a08eef334572" containerName="barbican-api" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278487 4742 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-updater" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278495 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-updater" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278505 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-server" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278510 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-server" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278519 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-replicator" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278525 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-replicator" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278533 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6063f78-1b45-493e-ae25-62239a1ed5e3" containerName="cinder-api" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278539 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6063f78-1b45-493e-ae25-62239a1ed5e3" containerName="cinder-api" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278549 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb4dce96-8228-455b-9edc-37a62af6e732" containerName="kube-state-metrics" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278554 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb4dce96-8228-455b-9edc-37a62af6e732" containerName="kube-state-metrics" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278565 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="ceilometer-central-agent" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278571 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="ceilometer-central-agent" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278581 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa30e851-f383-42c0-9e09-d8c896ed77ad" containerName="barbican-worker" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278587 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa30e851-f383-42c0-9e09-d8c896ed77ad" containerName="barbican-worker" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278595 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7a764d5-447f-483d-b819-0e398e749600" containerName="glance-httpd" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278600 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7a764d5-447f-483d-b819-0e398e749600" containerName="glance-httpd" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278607 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b5d8165-e06e-4600-9cab-9cf84c010725" containerName="rabbitmq" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278613 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b5d8165-e06e-4600-9cab-9cf84c010725" containerName="rabbitmq" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278619 4742 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" containerName="barbican-keystone-listener-log" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278624 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" containerName="barbican-keystone-listener-log" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278632 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1b7e898-ff4e-4523-8602-18d5937c3e5f" containerName="nova-cell1-conductor-conductor" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278650 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1b7e898-ff4e-4523-8602-18d5937c3e5f" containerName="nova-cell1-conductor-conductor" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278662 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" containerName="nova-metadata-log" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278668 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" containerName="nova-metadata-log" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278677 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4ba4170-0240-42d9-85f4-cf3587f39f02" containerName="galera" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278683 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4ba4170-0240-42d9-85f4-cf3587f39f02" containerName="galera" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278691 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d993905-0c76-454d-8eac-8a93674522db" containerName="keystone-api" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278696 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d993905-0c76-454d-8eac-8a93674522db" containerName="keystone-api" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278704 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovsdb-server-init" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278710 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovsdb-server-init" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278717 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7a764d5-447f-483d-b819-0e398e749600" containerName="glance-log" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278722 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7a764d5-447f-483d-b819-0e398e749600" containerName="glance-log" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278730 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7d76df0-4f21-4729-9729-1f2ff54a8332" containerName="openstack-network-exporter" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278736 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7d76df0-4f21-4729-9729-1f2ff54a8332" containerName="openstack-network-exporter" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278744 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="439adfba-ae29-4db5-8a77-88eede9d0bd9" containerName="extract-content" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278750 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="439adfba-ae29-4db5-8a77-88eede9d0bd9" containerName="extract-content" Dec 05 
06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278759 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3428207-2cb4-47d8-b4d8-941c3a4928fb" containerName="glance-log" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278764 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3428207-2cb4-47d8-b4d8-941c3a4928fb" containerName="glance-log" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278773 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f2ab762-07a0-426d-a84a-a53ad7e2fef0" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278779 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f2ab762-07a0-426d-a84a-a53ad7e2fef0" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278789 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e42757b3-029e-4fe9-917f-73331394524e" containerName="placement-log" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278795 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e42757b3-029e-4fe9-917f-73331394524e" containerName="placement-log" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278804 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" containerName="barbican-keystone-listener" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278810 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" containerName="barbican-keystone-listener" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278823 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovs-vswitchd" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278829 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovs-vswitchd" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278838 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="338b9928-12cd-4db4-806e-4f42612c5ab6" containerName="nova-cell0-conductor-conductor" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278843 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="338b9928-12cd-4db4-806e-4f42612c5ab6" containerName="nova-cell0-conductor-conductor" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278853 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa702931-d853-4f8b-b0d8-58f5476bb7c2" containerName="nova-api-log" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278858 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa702931-d853-4f8b-b0d8-58f5476bb7c2" containerName="nova-api-log" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278872 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="proxy-httpd" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278878 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="proxy-httpd" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278883 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42c1f939-2d9c-4a8d-a341-cbce22551d58" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278889 4742 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="42c1f939-2d9c-4a8d-a341-cbce22551d58" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278896 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-reaper" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278902 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-reaper" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278911 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="rsync" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278917 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="rsync" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278928 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b2208e7-3101-4090-9f35-fba640d2f1d9" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278933 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b2208e7-3101-4090-9f35-fba640d2f1d9" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278942 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-auditor" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278947 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-auditor" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278955 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3428207-2cb4-47d8-b4d8-941c3a4928fb" containerName="glance-httpd" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278960 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3428207-2cb4-47d8-b4d8-941c3a4928fb" containerName="glance-httpd" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278968 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="sg-core" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278974 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="sg-core" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278979 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="swift-recon-cron" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278985 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="swift-recon-cron" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.278991 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e42757b3-029e-4fe9-917f-73331394524e" containerName="placement-api" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.278997 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e42757b3-029e-4fe9-917f-73331394524e" containerName="placement-api" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279006 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="439adfba-ae29-4db5-8a77-88eede9d0bd9" containerName="extract-utilities" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279011 4742 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="439adfba-ae29-4db5-8a77-88eede9d0bd9" containerName="extract-utilities" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279021 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-server" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279027 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-server" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279034 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a5ca1f6-73b0-43da-82c6-995495666585" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279040 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a5ca1f6-73b0-43da-82c6-995495666585" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279047 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b956518-9768-477f-9acb-1fc3459427f7" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279073 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b956518-9768-477f-9acb-1fc3459427f7" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279082 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-auditor" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279088 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-auditor" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279097 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4ba4170-0240-42d9-85f4-cf3587f39f02" containerName="mysql-bootstrap" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279103 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4ba4170-0240-42d9-85f4-cf3587f39f02" containerName="mysql-bootstrap" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279109 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4227032-1b4c-4059-b91f-cf5ece6b20b2" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279115 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4227032-1b4c-4059-b91f-cf5ece6b20b2" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279121 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-replicator" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279127 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-replicator" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279134 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-expirer" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279139 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-expirer" Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279149 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec392288-7e80-4956-836c-d400d4460ebc" containerName="memcached" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279154 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec392288-7e80-4956-836c-d400d4460ebc" containerName="memcached"
Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279161 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-auditor"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279167 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-auditor"
Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279177 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" containerName="setup-container"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279182 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" containerName="setup-container"
Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279188 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="439adfba-ae29-4db5-8a77-88eede9d0bd9" containerName="registry-server"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279194 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="439adfba-ae29-4db5-8a77-88eede9d0bd9" containerName="registry-server"
Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279200 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b5d8165-e06e-4600-9cab-9cf84c010725" containerName="setup-container"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279206 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b5d8165-e06e-4600-9cab-9cf84c010725" containerName="setup-container"
Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279216 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a690523-b1e4-4dd5-b280-58fd8b91b3bf" containerName="mariadb-account-delete"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279221 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a690523-b1e4-4dd5-b280-58fd8b91b3bf" containerName="mariadb-account-delete"
Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279229 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa30e851-f383-42c0-9e09-d8c896ed77ad" containerName="barbican-worker-log"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279234 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa30e851-f383-42c0-9e09-d8c896ed77ad" containerName="barbican-worker-log"
Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279241 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-server"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279247 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-server"
Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279255 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9974486-076d-4493-af32-a08eef334572" containerName="barbican-api-log"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279261 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9974486-076d-4493-af32-a08eef334572" containerName="barbican-api-log"
Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279268 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6063f78-1b45-493e-ae25-62239a1ed5e3" containerName="cinder-api-log"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279274 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6063f78-1b45-493e-ae25-62239a1ed5e3" containerName="cinder-api-log"
Dec 05 06:17:36 crc kubenswrapper[4742]: E1205 06:17:36.279284 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-updater"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279289 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-updater"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279412 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="61c4b9e1-5266-49eb-8348-3b1034562185" containerName="mariadb-account-delete"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279423 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-server"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279429 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" containerName="nova-metadata-metadata"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279436 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="65eece87-4279-4d6c-b2a6-5841fd5b3298" containerName="neutron-api"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279444 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-updater"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279459 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="338b9928-12cd-4db4-806e-4f42612c5ab6" containerName="nova-cell0-conductor-conductor"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279468 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="42c1f939-2d9c-4a8d-a341-cbce22551d58" containerName="mariadb-account-delete"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279477 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9974486-076d-4493-af32-a08eef334572" containerName="barbican-api"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279487 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa30e851-f383-42c0-9e09-d8c896ed77ad" containerName="barbican-worker-log"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279497 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-server"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279503 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-replicator"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279511 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-expirer"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279517 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7a764d5-447f-483d-b819-0e398e749600" containerName="glance-log"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279525 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovsdb-server"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279545 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6063f78-1b45-493e-ae25-62239a1ed5e3" containerName="cinder-api"
"RemoveStaleState removing state" podUID="e6063f78-1b45-493e-ae25-62239a1ed5e3" containerName="cinder-api" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279552 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b2208e7-3101-4090-9f35-fba640d2f1d9" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279560 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="504b6b10-062b-4d3c-8202-fcfd97bc57aa" containerName="ovs-vswitchd" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279569 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa702931-d853-4f8b-b0d8-58f5476bb7c2" containerName="nova-api-log" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279581 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="sg-core" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279588 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa702931-d853-4f8b-b0d8-58f5476bb7c2" containerName="nova-api-api" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279597 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7a764d5-447f-483d-b819-0e398e749600" containerName="glance-httpd" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279603 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="rsync" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279612 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" containerName="barbican-keystone-listener" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279618 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-auditor" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279625 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-auditor" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279631 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-updater" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279638 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa30e851-f383-42c0-9e09-d8c896ed77ad" containerName="barbican-worker" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279646 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f2ab762-07a0-426d-a84a-a53ad7e2fef0" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279653 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b956518-9768-477f-9acb-1fc3459427f7" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279659 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3428207-2cb4-47d8-b4d8-941c3a4928fb" containerName="glance-httpd" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279667 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a5ca1f6-73b0-43da-82c6-995495666585" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279675 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e42757b3-029e-4fe9-917f-73331394524e" 
containerName="placement-log" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279683 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1b7e898-ff4e-4523-8602-18d5937c3e5f" containerName="nova-cell1-conductor-conductor" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279693 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec392288-7e80-4956-836c-d400d4460ebc" containerName="memcached" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279700 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb4dce96-8228-455b-9edc-37a62af6e732" containerName="kube-state-metrics" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279709 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6b096f4-483e-48c5-a3e1-a178c0c5ae6e" containerName="rabbitmq" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279715 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="ceilometer-notification-agent" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279722 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="container-replicator" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279730 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f4c3ae5-d78c-4ddb-953d-cbee5b815be9" containerName="barbican-keystone-listener-log" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279738 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b5d8165-e06e-4600-9cab-9cf84c010725" containerName="rabbitmq" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279747 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="ceilometer-central-agent" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279754 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7d76df0-4f21-4729-9729-1f2ff54a8332" containerName="openstack-network-exporter" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279762 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4ba4170-0240-42d9-85f4-cf3587f39f02" containerName="galera" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279770 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="439adfba-ae29-4db5-8a77-88eede9d0bd9" containerName="registry-server" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279777 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e42757b3-029e-4fe9-917f-73331394524e" containerName="placement-api" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279785 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="swift-recon-cron" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279792 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a690523-b1e4-4dd5-b280-58fd8b91b3bf" containerName="mariadb-account-delete" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279799 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6063f78-1b45-493e-ae25-62239a1ed5e3" containerName="cinder-api-log" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279806 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7d76df0-4f21-4729-9729-1f2ff54a8332" containerName="ovn-northd" Dec 05 06:17:36 crc 
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279812 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="65eece87-4279-4d6c-b2a6-5841fd5b3298" containerName="neutron-httpd"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279818 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="object-auditor"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279823 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d993905-0c76-454d-8eac-8a93674522db" containerName="keystone-api"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279830 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9974486-076d-4493-af32-a08eef334572" containerName="barbican-api-log"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279835 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-reaper"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279844 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="20ae73b5-51f4-4bcf-ba9c-c35f566cd07e" containerName="proxy-httpd"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279853 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4227032-1b4c-4059-b91f-cf5ece6b20b2" containerName="mariadb-account-delete"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279860 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-replicator"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279869 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3428207-2cb4-47d8-b4d8-941c3a4928fb" containerName="glance-log"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279876 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d1bf24-115c-4dd5-8dcd-84a3b4e7456b" containerName="account-server"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.279883 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="7038cd99-8151-4157-93c6-3b7f5b9ce25e" containerName="nova-metadata-log"
Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.280827 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k279x"
Need to start a new one" pod="openshift-marketplace/community-operators-k279x" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.293620 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k279x"] Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.317314 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9249c786-5049-4e62-b0e6-5ec05ea5f61a-utilities\") pod \"community-operators-k279x\" (UID: \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\") " pod="openshift-marketplace/community-operators-k279x" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.317839 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9249c786-5049-4e62-b0e6-5ec05ea5f61a-catalog-content\") pod \"community-operators-k279x\" (UID: \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\") " pod="openshift-marketplace/community-operators-k279x" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.317951 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25fp5\" (UniqueName: \"kubernetes.io/projected/9249c786-5049-4e62-b0e6-5ec05ea5f61a-kube-api-access-25fp5\") pod \"community-operators-k279x\" (UID: \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\") " pod="openshift-marketplace/community-operators-k279x" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.419758 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9249c786-5049-4e62-b0e6-5ec05ea5f61a-utilities\") pod \"community-operators-k279x\" (UID: \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\") " pod="openshift-marketplace/community-operators-k279x" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.419866 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9249c786-5049-4e62-b0e6-5ec05ea5f61a-catalog-content\") pod \"community-operators-k279x\" (UID: \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\") " pod="openshift-marketplace/community-operators-k279x" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.419951 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25fp5\" (UniqueName: \"kubernetes.io/projected/9249c786-5049-4e62-b0e6-5ec05ea5f61a-kube-api-access-25fp5\") pod \"community-operators-k279x\" (UID: \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\") " pod="openshift-marketplace/community-operators-k279x" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.420788 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9249c786-5049-4e62-b0e6-5ec05ea5f61a-catalog-content\") pod \"community-operators-k279x\" (UID: \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\") " pod="openshift-marketplace/community-operators-k279x" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.420821 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9249c786-5049-4e62-b0e6-5ec05ea5f61a-utilities\") pod \"community-operators-k279x\" (UID: \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\") " pod="openshift-marketplace/community-operators-k279x" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.443075 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-25fp5\" (UniqueName: \"kubernetes.io/projected/9249c786-5049-4e62-b0e6-5ec05ea5f61a-kube-api-access-25fp5\") pod \"community-operators-k279x\" (UID: \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\") " pod="openshift-marketplace/community-operators-k279x" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.602384 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k279x" Dec 05 06:17:36 crc kubenswrapper[4742]: I1205 06:17:36.891210 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-k279x"] Dec 05 06:17:37 crc kubenswrapper[4742]: I1205 06:17:37.830737 4742 generic.go:334] "Generic (PLEG): container finished" podID="9249c786-5049-4e62-b0e6-5ec05ea5f61a" containerID="1021ea44f821a03a275201bca5ac8eb3764a77d46696be2af82ae6775296d80c" exitCode=0 Dec 05 06:17:37 crc kubenswrapper[4742]: I1205 06:17:37.830837 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k279x" event={"ID":"9249c786-5049-4e62-b0e6-5ec05ea5f61a","Type":"ContainerDied","Data":"1021ea44f821a03a275201bca5ac8eb3764a77d46696be2af82ae6775296d80c"} Dec 05 06:17:37 crc kubenswrapper[4742]: I1205 06:17:37.831188 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k279x" event={"ID":"9249c786-5049-4e62-b0e6-5ec05ea5f61a","Type":"ContainerStarted","Data":"b5709a1815547bff70dca8afefc9e9734f6a80fe0b48729d201301fb46d41a5c"} Dec 05 06:17:40 crc kubenswrapper[4742]: I1205 06:17:40.865654 4742 generic.go:334] "Generic (PLEG): container finished" podID="9249c786-5049-4e62-b0e6-5ec05ea5f61a" containerID="8d4fe99f541b2cc4920e50e15ad1e2e8e44af4764d5cf9f2f1324b6f304878eb" exitCode=0 Dec 05 06:17:40 crc kubenswrapper[4742]: I1205 06:17:40.865770 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k279x" event={"ID":"9249c786-5049-4e62-b0e6-5ec05ea5f61a","Type":"ContainerDied","Data":"8d4fe99f541b2cc4920e50e15ad1e2e8e44af4764d5cf9f2f1324b6f304878eb"} Dec 05 06:17:41 crc kubenswrapper[4742]: I1205 06:17:41.878037 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k279x" event={"ID":"9249c786-5049-4e62-b0e6-5ec05ea5f61a","Type":"ContainerStarted","Data":"1352995ab0b8fcc528842eca092e5f14ff85fea837ef32c2f2a6928e71cc0ba9"} Dec 05 06:17:41 crc kubenswrapper[4742]: I1205 06:17:41.899318 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-k279x" podStartSLOduration=2.436261822 podStartE2EDuration="5.899292911s" podCreationTimestamp="2025-12-05 06:17:36 +0000 UTC" firstStartedPulling="2025-12-05 06:17:37.833948836 +0000 UTC m=+1533.746083928" lastFinishedPulling="2025-12-05 06:17:41.296979925 +0000 UTC m=+1537.209115017" observedRunningTime="2025-12-05 06:17:41.894757132 +0000 UTC m=+1537.806892184" watchObservedRunningTime="2025-12-05 06:17:41.899292911 +0000 UTC m=+1537.811427973" Dec 05 06:17:46 crc kubenswrapper[4742]: I1205 06:17:46.602890 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-k279x" Dec 05 06:17:46 crc kubenswrapper[4742]: I1205 06:17:46.603584 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-k279x" Dec 05 06:17:46 crc kubenswrapper[4742]: I1205 06:17:46.671790 4742 
Dec 05 06:17:46 crc kubenswrapper[4742]: I1205 06:17:46.671893 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 06:17:46 crc kubenswrapper[4742]: I1205 06:17:46.676898 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-k279x"
Dec 05 06:17:46 crc kubenswrapper[4742]: I1205 06:17:46.981433 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-k279x"
Dec 05 06:17:47 crc kubenswrapper[4742]: I1205 06:17:47.036711 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k279x"]
Dec 05 06:17:48 crc kubenswrapper[4742]: I1205 06:17:48.942508 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-k279x" podUID="9249c786-5049-4e62-b0e6-5ec05ea5f61a" containerName="registry-server" containerID="cri-o://1352995ab0b8fcc528842eca092e5f14ff85fea837ef32c2f2a6928e71cc0ba9" gracePeriod=2
Dec 05 06:17:49 crc kubenswrapper[4742]: I1205 06:17:49.949135 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k279x"
Dec 05 06:17:49 crc kubenswrapper[4742]: I1205 06:17:49.957140 4742 generic.go:334] "Generic (PLEG): container finished" podID="9249c786-5049-4e62-b0e6-5ec05ea5f61a" containerID="1352995ab0b8fcc528842eca092e5f14ff85fea837ef32c2f2a6928e71cc0ba9" exitCode=0
Dec 05 06:17:49 crc kubenswrapper[4742]: I1205 06:17:49.957206 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-k279x"
Need to start a new one" pod="openshift-marketplace/community-operators-k279x" Dec 05 06:17:49 crc kubenswrapper[4742]: I1205 06:17:49.957208 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k279x" event={"ID":"9249c786-5049-4e62-b0e6-5ec05ea5f61a","Type":"ContainerDied","Data":"1352995ab0b8fcc528842eca092e5f14ff85fea837ef32c2f2a6928e71cc0ba9"} Dec 05 06:17:49 crc kubenswrapper[4742]: I1205 06:17:49.957481 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-k279x" event={"ID":"9249c786-5049-4e62-b0e6-5ec05ea5f61a","Type":"ContainerDied","Data":"b5709a1815547bff70dca8afefc9e9734f6a80fe0b48729d201301fb46d41a5c"} Dec 05 06:17:49 crc kubenswrapper[4742]: I1205 06:17:49.957510 4742 scope.go:117] "RemoveContainer" containerID="1352995ab0b8fcc528842eca092e5f14ff85fea837ef32c2f2a6928e71cc0ba9" Dec 05 06:17:49 crc kubenswrapper[4742]: I1205 06:17:49.989233 4742 scope.go:117] "RemoveContainer" containerID="8d4fe99f541b2cc4920e50e15ad1e2e8e44af4764d5cf9f2f1324b6f304878eb" Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.020471 4742 scope.go:117] "RemoveContainer" containerID="1021ea44f821a03a275201bca5ac8eb3764a77d46696be2af82ae6775296d80c" Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.044443 4742 scope.go:117] "RemoveContainer" containerID="1352995ab0b8fcc528842eca092e5f14ff85fea837ef32c2f2a6928e71cc0ba9" Dec 05 06:17:50 crc kubenswrapper[4742]: E1205 06:17:50.045107 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1352995ab0b8fcc528842eca092e5f14ff85fea837ef32c2f2a6928e71cc0ba9\": container with ID starting with 1352995ab0b8fcc528842eca092e5f14ff85fea837ef32c2f2a6928e71cc0ba9 not found: ID does not exist" containerID="1352995ab0b8fcc528842eca092e5f14ff85fea837ef32c2f2a6928e71cc0ba9" Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.045168 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1352995ab0b8fcc528842eca092e5f14ff85fea837ef32c2f2a6928e71cc0ba9"} err="failed to get container status \"1352995ab0b8fcc528842eca092e5f14ff85fea837ef32c2f2a6928e71cc0ba9\": rpc error: code = NotFound desc = could not find container \"1352995ab0b8fcc528842eca092e5f14ff85fea837ef32c2f2a6928e71cc0ba9\": container with ID starting with 1352995ab0b8fcc528842eca092e5f14ff85fea837ef32c2f2a6928e71cc0ba9 not found: ID does not exist" Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.045202 4742 scope.go:117] "RemoveContainer" containerID="8d4fe99f541b2cc4920e50e15ad1e2e8e44af4764d5cf9f2f1324b6f304878eb" Dec 05 06:17:50 crc kubenswrapper[4742]: E1205 06:17:50.046147 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d4fe99f541b2cc4920e50e15ad1e2e8e44af4764d5cf9f2f1324b6f304878eb\": container with ID starting with 8d4fe99f541b2cc4920e50e15ad1e2e8e44af4764d5cf9f2f1324b6f304878eb not found: ID does not exist" containerID="8d4fe99f541b2cc4920e50e15ad1e2e8e44af4764d5cf9f2f1324b6f304878eb" Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.046254 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d4fe99f541b2cc4920e50e15ad1e2e8e44af4764d5cf9f2f1324b6f304878eb"} err="failed to get container status \"8d4fe99f541b2cc4920e50e15ad1e2e8e44af4764d5cf9f2f1324b6f304878eb\": rpc error: code = NotFound desc = could not find container 
\"8d4fe99f541b2cc4920e50e15ad1e2e8e44af4764d5cf9f2f1324b6f304878eb\": container with ID starting with 8d4fe99f541b2cc4920e50e15ad1e2e8e44af4764d5cf9f2f1324b6f304878eb not found: ID does not exist" Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.046293 4742 scope.go:117] "RemoveContainer" containerID="1021ea44f821a03a275201bca5ac8eb3764a77d46696be2af82ae6775296d80c" Dec 05 06:17:50 crc kubenswrapper[4742]: E1205 06:17:50.046698 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1021ea44f821a03a275201bca5ac8eb3764a77d46696be2af82ae6775296d80c\": container with ID starting with 1021ea44f821a03a275201bca5ac8eb3764a77d46696be2af82ae6775296d80c not found: ID does not exist" containerID="1021ea44f821a03a275201bca5ac8eb3764a77d46696be2af82ae6775296d80c" Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.046742 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1021ea44f821a03a275201bca5ac8eb3764a77d46696be2af82ae6775296d80c"} err="failed to get container status \"1021ea44f821a03a275201bca5ac8eb3764a77d46696be2af82ae6775296d80c\": rpc error: code = NotFound desc = could not find container \"1021ea44f821a03a275201bca5ac8eb3764a77d46696be2af82ae6775296d80c\": container with ID starting with 1021ea44f821a03a275201bca5ac8eb3764a77d46696be2af82ae6775296d80c not found: ID does not exist" Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.057674 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9249c786-5049-4e62-b0e6-5ec05ea5f61a-catalog-content\") pod \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\" (UID: \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\") " Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.057773 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25fp5\" (UniqueName: \"kubernetes.io/projected/9249c786-5049-4e62-b0e6-5ec05ea5f61a-kube-api-access-25fp5\") pod \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\" (UID: \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\") " Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.057850 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9249c786-5049-4e62-b0e6-5ec05ea5f61a-utilities\") pod \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\" (UID: \"9249c786-5049-4e62-b0e6-5ec05ea5f61a\") " Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.058898 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9249c786-5049-4e62-b0e6-5ec05ea5f61a-utilities" (OuterVolumeSpecName: "utilities") pod "9249c786-5049-4e62-b0e6-5ec05ea5f61a" (UID: "9249c786-5049-4e62-b0e6-5ec05ea5f61a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.066370 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9249c786-5049-4e62-b0e6-5ec05ea5f61a-kube-api-access-25fp5" (OuterVolumeSpecName: "kube-api-access-25fp5") pod "9249c786-5049-4e62-b0e6-5ec05ea5f61a" (UID: "9249c786-5049-4e62-b0e6-5ec05ea5f61a"). InnerVolumeSpecName "kube-api-access-25fp5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.120006 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9249c786-5049-4e62-b0e6-5ec05ea5f61a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9249c786-5049-4e62-b0e6-5ec05ea5f61a" (UID: "9249c786-5049-4e62-b0e6-5ec05ea5f61a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.159805 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25fp5\" (UniqueName: \"kubernetes.io/projected/9249c786-5049-4e62-b0e6-5ec05ea5f61a-kube-api-access-25fp5\") on node \"crc\" DevicePath \"\"" Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.159842 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9249c786-5049-4e62-b0e6-5ec05ea5f61a-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.159854 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9249c786-5049-4e62-b0e6-5ec05ea5f61a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.321621 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-k279x"] Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.332934 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-k279x"] Dec 05 06:17:50 crc kubenswrapper[4742]: I1205 06:17:50.395846 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9249c786-5049-4e62-b0e6-5ec05ea5f61a" path="/var/lib/kubelet/pods/9249c786-5049-4e62-b0e6-5ec05ea5f61a/volumes" Dec 05 06:18:10 crc kubenswrapper[4742]: I1205 06:18:10.517494 4742 scope.go:117] "RemoveContainer" containerID="d4b4d53a61d568d27cd31811531077ba4d61d36b10ce070641d0523f22a892e8" Dec 05 06:18:10 crc kubenswrapper[4742]: I1205 06:18:10.554481 4742 scope.go:117] "RemoveContainer" containerID="ebc99ef0e07f3df716b11ee744309c678a42c8f8c434aa81b990d99b4d657ada" Dec 05 06:18:10 crc kubenswrapper[4742]: I1205 06:18:10.588429 4742 scope.go:117] "RemoveContainer" containerID="40e5259c068c72cb7a6feea8562a4bf25bb56c6f5b866eb1a774f17692a3f47c" Dec 05 06:18:10 crc kubenswrapper[4742]: I1205 06:18:10.660345 4742 scope.go:117] "RemoveContainer" containerID="a67a9bcba49907c071cc13e63399467b1596cfba2f9e45b6f818f93406035ff6" Dec 05 06:18:10 crc kubenswrapper[4742]: I1205 06:18:10.693246 4742 scope.go:117] "RemoveContainer" containerID="cf62b4048920e6f2c6c7dd02a335730388cd54bf021450a140d37df0f8600700" Dec 05 06:18:10 crc kubenswrapper[4742]: I1205 06:18:10.726264 4742 scope.go:117] "RemoveContainer" containerID="36120690a82b6f681780c91a20a3cc8a0a413e14252b3db344b7e688541ec8f6" Dec 05 06:18:10 crc kubenswrapper[4742]: I1205 06:18:10.759772 4742 scope.go:117] "RemoveContainer" containerID="5c49035a642271e66ce42bb0d1a45ebedab35110a82c05d221201aa774a0b6fc" Dec 05 06:18:10 crc kubenswrapper[4742]: I1205 06:18:10.792416 4742 scope.go:117] "RemoveContainer" containerID="b11b94d0ec61d166b0c11832fa3e1bd3d6d14ce98de189313ff3cf8c052111ac" Dec 05 06:18:10 crc kubenswrapper[4742]: I1205 06:18:10.817141 4742 scope.go:117] "RemoveContainer" containerID="3dfe6d954682eaace3f2556b9d454fccb07131b5cf542e9331587cb77a98c236" Dec 05 06:18:10 crc 
Dec 05 06:18:10 crc kubenswrapper[4742]: I1205 06:18:10.855977 4742 scope.go:117] "RemoveContainer" containerID="4c14252e4fd49cea57c985322e4ad4f38df07e8c27f7e24e38b80a1fb1bca49f"
Dec 05 06:18:10 crc kubenswrapper[4742]: I1205 06:18:10.920618 4742 scope.go:117] "RemoveContainer" containerID="327958d28604e61b0f08263d448489011f061103aa9d6f3703a193a7c7d54c73"
Dec 05 06:18:10 crc kubenswrapper[4742]: I1205 06:18:10.961161 4742 scope.go:117] "RemoveContainer" containerID="cf30eeea73b225196dce65f05adf26d5dd173c94ed9ebc9ce44cfa0d14ed9410"
Dec 05 06:18:10 crc kubenswrapper[4742]: I1205 06:18:10.978382 4742 scope.go:117] "RemoveContainer" containerID="510a3781c536a45f9268a131d634df6000b7aa0790f8ecd41f3df0f245747f22"
Dec 05 06:18:10 crc kubenswrapper[4742]: I1205 06:18:10.997631 4742 scope.go:117] "RemoveContainer" containerID="7e33ab88a84bfcd32ed36ac73c5cc6d18581c628f85c65c7a393f805d3496823"
Dec 05 06:18:11 crc kubenswrapper[4742]: I1205 06:18:11.016540 4742 scope.go:117] "RemoveContainer" containerID="2aafa85d6212561f15a268369138905fcc47bfc7c260a0f1db5a1bb422e74c99"
Dec 05 06:18:11 crc kubenswrapper[4742]: I1205 06:18:11.036764 4742 scope.go:117] "RemoveContainer" containerID="e7e04a131329ec48b97ade2755e762fab3c6aca07ebac33bdded7c2512e31b7c"
Dec 05 06:18:16 crc kubenswrapper[4742]: I1205 06:18:16.671846 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 06:18:16 crc kubenswrapper[4742]: I1205 06:18:16.672859 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.091575 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rxrk8"]
Dec 05 06:18:46 crc kubenswrapper[4742]: E1205 06:18:46.092603 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9249c786-5049-4e62-b0e6-5ec05ea5f61a" containerName="extract-utilities"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.092623 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="9249c786-5049-4e62-b0e6-5ec05ea5f61a" containerName="extract-utilities"
Dec 05 06:18:46 crc kubenswrapper[4742]: E1205 06:18:46.092646 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9249c786-5049-4e62-b0e6-5ec05ea5f61a" containerName="extract-content"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.092656 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="9249c786-5049-4e62-b0e6-5ec05ea5f61a" containerName="extract-content"
Dec 05 06:18:46 crc kubenswrapper[4742]: E1205 06:18:46.092680 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9249c786-5049-4e62-b0e6-5ec05ea5f61a" containerName="registry-server"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.092690 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="9249c786-5049-4e62-b0e6-5ec05ea5f61a" containerName="registry-server"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.092926 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="9249c786-5049-4e62-b0e6-5ec05ea5f61a" containerName="registry-server"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.094475 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rxrk8"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.098908 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rxrk8"]
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.129796 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d96d8\" (UniqueName: \"kubernetes.io/projected/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-kube-api-access-d96d8\") pod \"redhat-marketplace-rxrk8\" (UID: \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\") " pod="openshift-marketplace/redhat-marketplace-rxrk8"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.129861 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-catalog-content\") pod \"redhat-marketplace-rxrk8\" (UID: \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\") " pod="openshift-marketplace/redhat-marketplace-rxrk8"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.129897 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-utilities\") pod \"redhat-marketplace-rxrk8\" (UID: \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\") " pod="openshift-marketplace/redhat-marketplace-rxrk8"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.231152 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d96d8\" (UniqueName: \"kubernetes.io/projected/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-kube-api-access-d96d8\") pod \"redhat-marketplace-rxrk8\" (UID: \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\") " pod="openshift-marketplace/redhat-marketplace-rxrk8"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.231199 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-catalog-content\") pod \"redhat-marketplace-rxrk8\" (UID: \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\") " pod="openshift-marketplace/redhat-marketplace-rxrk8"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.231228 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-utilities\") pod \"redhat-marketplace-rxrk8\" (UID: \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\") " pod="openshift-marketplace/redhat-marketplace-rxrk8"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.231779 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-catalog-content\") pod \"redhat-marketplace-rxrk8\" (UID: \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\") " pod="openshift-marketplace/redhat-marketplace-rxrk8"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.231810 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-utilities\") pod \"redhat-marketplace-rxrk8\" (UID: \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\") " pod="openshift-marketplace/redhat-marketplace-rxrk8"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.257415 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d96d8\" (UniqueName: \"kubernetes.io/projected/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-kube-api-access-d96d8\") pod \"redhat-marketplace-rxrk8\" (UID: \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\") " pod="openshift-marketplace/redhat-marketplace-rxrk8"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.461777 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rxrk8"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.671386 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.671651 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.671708 4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.672599 4742 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.672666 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" gracePeriod=600
Dec 05 06:18:46 crc kubenswrapper[4742]: E1205 06:18:46.806352 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5"
Dec 05 06:18:46 crc kubenswrapper[4742]: I1205 06:18:46.900338 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rxrk8"]
Dec 05 06:18:46 crc kubenswrapper[4742]: W1205 06:18:46.905748 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f2fbe0f_30b1_432d_b029_ea7c20b064b7.slice/crio-6fb3f868c225d3fd1da1a01f005d6e8d6eea467fb51a42fd7afc7dde5c38bd91 WatchSource:0}: Error finding container 6fb3f868c225d3fd1da1a01f005d6e8d6eea467fb51a42fd7afc7dde5c38bd91: Status 404 returned error can't find the container with id 6fb3f868c225d3fd1da1a01f005d6e8d6eea467fb51a42fd7afc7dde5c38bd91
Dec 05 06:18:47 crc kubenswrapper[4742]: I1205 06:18:47.538646 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" exitCode=0
Dec 05 06:18:47 crc kubenswrapper[4742]: I1205 06:18:47.538796 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b"}
Dec 05 06:18:47 crc kubenswrapper[4742]: I1205 06:18:47.539344 4742 scope.go:117] "RemoveContainer" containerID="2a0650cb5fb1ecf5b2a54d2428e362d6056d9793a00fef4de45d1cd9dff294dd"
Dec 05 06:18:47 crc kubenswrapper[4742]: I1205 06:18:47.540295 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b"
Dec 05 06:18:47 crc kubenswrapper[4742]: E1205 06:18:47.540879 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5"
Dec 05 06:18:47 crc kubenswrapper[4742]: I1205 06:18:47.544465 4742 generic.go:334] "Generic (PLEG): container finished" podID="1f2fbe0f-30b1-432d-b029-ea7c20b064b7" containerID="44a7d92b91f679c39e21caed23f643131572433dc63af2d6afdf7701a75cfc3c" exitCode=0
Dec 05 06:18:47 crc kubenswrapper[4742]: I1205 06:18:47.544495 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rxrk8" event={"ID":"1f2fbe0f-30b1-432d-b029-ea7c20b064b7","Type":"ContainerDied","Data":"44a7d92b91f679c39e21caed23f643131572433dc63af2d6afdf7701a75cfc3c"}
Dec 05 06:18:47 crc kubenswrapper[4742]: I1205 06:18:47.544520 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rxrk8" event={"ID":"1f2fbe0f-30b1-432d-b029-ea7c20b064b7","Type":"ContainerStarted","Data":"6fb3f868c225d3fd1da1a01f005d6e8d6eea467fb51a42fd7afc7dde5c38bd91"}
Dec 05 06:18:48 crc kubenswrapper[4742]: I1205 06:18:48.559029 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rxrk8" event={"ID":"1f2fbe0f-30b1-432d-b029-ea7c20b064b7","Type":"ContainerStarted","Data":"9254f8287e97cb13a3c08250f193170a3eed15eefc9a38e224a28eb063654130"}
Dec 05 06:18:49 crc kubenswrapper[4742]: I1205 06:18:49.574195 4742 generic.go:334] "Generic (PLEG): container finished" podID="1f2fbe0f-30b1-432d-b029-ea7c20b064b7" containerID="9254f8287e97cb13a3c08250f193170a3eed15eefc9a38e224a28eb063654130" exitCode=0
Dec 05 06:18:49 crc kubenswrapper[4742]: I1205 06:18:49.574318 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rxrk8" event={"ID":"1f2fbe0f-30b1-432d-b029-ea7c20b064b7","Type":"ContainerDied","Data":"9254f8287e97cb13a3c08250f193170a3eed15eefc9a38e224a28eb063654130"}
Dec 05 06:18:50 crc kubenswrapper[4742]: I1205 06:18:50.586366 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rxrk8" event={"ID":"1f2fbe0f-30b1-432d-b029-ea7c20b064b7","Type":"ContainerStarted","Data":"447d2817e38724b7bdb51a68eae398043d33d52bb53f47767c5347901cf1ddb8"}
event={"ID":"1f2fbe0f-30b1-432d-b029-ea7c20b064b7","Type":"ContainerStarted","Data":"447d2817e38724b7bdb51a68eae398043d33d52bb53f47767c5347901cf1ddb8"} Dec 05 06:18:50 crc kubenswrapper[4742]: I1205 06:18:50.619488 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rxrk8" podStartSLOduration=2.208107289 podStartE2EDuration="4.619466417s" podCreationTimestamp="2025-12-05 06:18:46 +0000 UTC" firstStartedPulling="2025-12-05 06:18:47.546853355 +0000 UTC m=+1603.458988457" lastFinishedPulling="2025-12-05 06:18:49.958212523 +0000 UTC m=+1605.870347585" observedRunningTime="2025-12-05 06:18:50.61430175 +0000 UTC m=+1606.526436812" watchObservedRunningTime="2025-12-05 06:18:50.619466417 +0000 UTC m=+1606.531601479" Dec 05 06:18:56 crc kubenswrapper[4742]: I1205 06:18:56.462613 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rxrk8" Dec 05 06:18:56 crc kubenswrapper[4742]: I1205 06:18:56.463004 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rxrk8" Dec 05 06:18:56 crc kubenswrapper[4742]: I1205 06:18:56.543378 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rxrk8" Dec 05 06:18:56 crc kubenswrapper[4742]: I1205 06:18:56.711799 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rxrk8" Dec 05 06:18:56 crc kubenswrapper[4742]: I1205 06:18:56.798601 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rxrk8"] Dec 05 06:18:58 crc kubenswrapper[4742]: I1205 06:18:58.666324 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rxrk8" podUID="1f2fbe0f-30b1-432d-b029-ea7c20b064b7" containerName="registry-server" containerID="cri-o://447d2817e38724b7bdb51a68eae398043d33d52bb53f47767c5347901cf1ddb8" gracePeriod=2 Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.624630 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rxrk8" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.665186 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-catalog-content\") pod \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\" (UID: \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\") " Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.665420 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-utilities\") pod \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\" (UID: \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\") " Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.665462 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d96d8\" (UniqueName: \"kubernetes.io/projected/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-kube-api-access-d96d8\") pod \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\" (UID: \"1f2fbe0f-30b1-432d-b029-ea7c20b064b7\") " Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.666287 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-utilities" (OuterVolumeSpecName: "utilities") pod "1f2fbe0f-30b1-432d-b029-ea7c20b064b7" (UID: "1f2fbe0f-30b1-432d-b029-ea7c20b064b7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.673484 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-kube-api-access-d96d8" (OuterVolumeSpecName: "kube-api-access-d96d8") pod "1f2fbe0f-30b1-432d-b029-ea7c20b064b7" (UID: "1f2fbe0f-30b1-432d-b029-ea7c20b064b7"). InnerVolumeSpecName "kube-api-access-d96d8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.681603 4742 generic.go:334] "Generic (PLEG): container finished" podID="1f2fbe0f-30b1-432d-b029-ea7c20b064b7" containerID="447d2817e38724b7bdb51a68eae398043d33d52bb53f47767c5347901cf1ddb8" exitCode=0 Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.681644 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rxrk8" event={"ID":"1f2fbe0f-30b1-432d-b029-ea7c20b064b7","Type":"ContainerDied","Data":"447d2817e38724b7bdb51a68eae398043d33d52bb53f47767c5347901cf1ddb8"} Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.681672 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rxrk8" event={"ID":"1f2fbe0f-30b1-432d-b029-ea7c20b064b7","Type":"ContainerDied","Data":"6fb3f868c225d3fd1da1a01f005d6e8d6eea467fb51a42fd7afc7dde5c38bd91"} Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.681688 4742 scope.go:117] "RemoveContainer" containerID="447d2817e38724b7bdb51a68eae398043d33d52bb53f47767c5347901cf1ddb8" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.681699 4742 util.go:48] "No ready sandbox for pod can be found. 
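
The UnmountVolume / TearDown / "Volume detached" sequence in this stretch is the kubelet volume reconciler converging actual state onto desired state: once the pod is deleted, none of its volumes are desired, so every still-mounted one is torn down and then reported detached. A toy model of that diff, with the volume names taken from the log; the reconcile helper is illustrative, not the real reconciler:

package main

import (
	"fmt"
	"sort"
)

// reconcile returns the volumes that are mounted but no longer desired,
// i.e. the ones the reconciler must unmount.
func reconcile(desired, mounted map[string]bool) []string {
	var toUnmount []string
	for vol := range mounted {
		if !desired[vol] {
			toUnmount = append(toUnmount, vol)
		}
	}
	sort.Strings(toUnmount) // deterministic order for the example
	return toUnmount
}

func main() {
	mounted := map[string]bool{
		"catalog-content":       true,
		"kube-api-access-d96d8": true,
		"utilities":             true,
	}
	// The pod was deleted, so the desired set is empty: all three are
	// unmounted, matching the three TearDown lines above.
	fmt.Println(reconcile(map[string]bool{}, mounted))
}
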
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rxrk8" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.682849 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1f2fbe0f-30b1-432d-b029-ea7c20b064b7" (UID: "1f2fbe0f-30b1-432d-b029-ea7c20b064b7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.726774 4742 scope.go:117] "RemoveContainer" containerID="9254f8287e97cb13a3c08250f193170a3eed15eefc9a38e224a28eb063654130" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.754076 4742 scope.go:117] "RemoveContainer" containerID="44a7d92b91f679c39e21caed23f643131572433dc63af2d6afdf7701a75cfc3c" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.767718 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.767746 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.767756 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d96d8\" (UniqueName: \"kubernetes.io/projected/1f2fbe0f-30b1-432d-b029-ea7c20b064b7-kube-api-access-d96d8\") on node \"crc\" DevicePath \"\"" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.771275 4742 scope.go:117] "RemoveContainer" containerID="447d2817e38724b7bdb51a68eae398043d33d52bb53f47767c5347901cf1ddb8" Dec 05 06:18:59 crc kubenswrapper[4742]: E1205 06:18:59.771789 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"447d2817e38724b7bdb51a68eae398043d33d52bb53f47767c5347901cf1ddb8\": container with ID starting with 447d2817e38724b7bdb51a68eae398043d33d52bb53f47767c5347901cf1ddb8 not found: ID does not exist" containerID="447d2817e38724b7bdb51a68eae398043d33d52bb53f47767c5347901cf1ddb8" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.771840 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"447d2817e38724b7bdb51a68eae398043d33d52bb53f47767c5347901cf1ddb8"} err="failed to get container status \"447d2817e38724b7bdb51a68eae398043d33d52bb53f47767c5347901cf1ddb8\": rpc error: code = NotFound desc = could not find container \"447d2817e38724b7bdb51a68eae398043d33d52bb53f47767c5347901cf1ddb8\": container with ID starting with 447d2817e38724b7bdb51a68eae398043d33d52bb53f47767c5347901cf1ddb8 not found: ID does not exist" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.771872 4742 scope.go:117] "RemoveContainer" containerID="9254f8287e97cb13a3c08250f193170a3eed15eefc9a38e224a28eb063654130" Dec 05 06:18:59 crc kubenswrapper[4742]: E1205 06:18:59.772257 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9254f8287e97cb13a3c08250f193170a3eed15eefc9a38e224a28eb063654130\": container with ID starting with 9254f8287e97cb13a3c08250f193170a3eed15eefc9a38e224a28eb063654130 not found: ID does not exist" 
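
The ContainerStatus / DeleteContainer NotFound errors here (and again for certified-operators-gz567 further down) are benign: the kubelet re-issues RemoveContainer after the runtime has already pruned the container, and a NotFound reply simply means the work is done. A sketch of that idempotent check over the CRI's gRPC transport; the isGone helper is hypothetical, not a kubelet function:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// isGone reports whether a runtime call failed only because the container
// no longer exists; callers can treat that as a successful removal.
func isGone(err error) bool {
	return status.Code(err) == codes.NotFound
}

func main() {
	// Shaped like the runtime error logged above.
	err := status.Error(codes.NotFound,
		`could not find container "9254f8287e97cb13a3c08250f193170a3eed15eefc9a38e224a28eb063654130"`)
	fmt.Println(isGone(err)) // true: already removed, nothing left to do
}
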
containerID="9254f8287e97cb13a3c08250f193170a3eed15eefc9a38e224a28eb063654130" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.772295 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9254f8287e97cb13a3c08250f193170a3eed15eefc9a38e224a28eb063654130"} err="failed to get container status \"9254f8287e97cb13a3c08250f193170a3eed15eefc9a38e224a28eb063654130\": rpc error: code = NotFound desc = could not find container \"9254f8287e97cb13a3c08250f193170a3eed15eefc9a38e224a28eb063654130\": container with ID starting with 9254f8287e97cb13a3c08250f193170a3eed15eefc9a38e224a28eb063654130 not found: ID does not exist" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.772321 4742 scope.go:117] "RemoveContainer" containerID="44a7d92b91f679c39e21caed23f643131572433dc63af2d6afdf7701a75cfc3c" Dec 05 06:18:59 crc kubenswrapper[4742]: E1205 06:18:59.772780 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44a7d92b91f679c39e21caed23f643131572433dc63af2d6afdf7701a75cfc3c\": container with ID starting with 44a7d92b91f679c39e21caed23f643131572433dc63af2d6afdf7701a75cfc3c not found: ID does not exist" containerID="44a7d92b91f679c39e21caed23f643131572433dc63af2d6afdf7701a75cfc3c" Dec 05 06:18:59 crc kubenswrapper[4742]: I1205 06:18:59.772819 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44a7d92b91f679c39e21caed23f643131572433dc63af2d6afdf7701a75cfc3c"} err="failed to get container status \"44a7d92b91f679c39e21caed23f643131572433dc63af2d6afdf7701a75cfc3c\": rpc error: code = NotFound desc = could not find container \"44a7d92b91f679c39e21caed23f643131572433dc63af2d6afdf7701a75cfc3c\": container with ID starting with 44a7d92b91f679c39e21caed23f643131572433dc63af2d6afdf7701a75cfc3c not found: ID does not exist" Dec 05 06:19:00 crc kubenswrapper[4742]: I1205 06:19:00.030878 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rxrk8"] Dec 05 06:19:00 crc kubenswrapper[4742]: I1205 06:19:00.047813 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rxrk8"] Dec 05 06:19:00 crc kubenswrapper[4742]: I1205 06:19:00.392241 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f2fbe0f-30b1-432d-b029-ea7c20b064b7" path="/var/lib/kubelet/pods/1f2fbe0f-30b1-432d-b029-ea7c20b064b7/volumes" Dec 05 06:19:01 crc kubenswrapper[4742]: I1205 06:19:01.382731 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:19:01 crc kubenswrapper[4742]: E1205 06:19:01.383025 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:19:11 crc kubenswrapper[4742]: I1205 06:19:11.306708 4742 scope.go:117] "RemoveContainer" containerID="fb328515c2eaa845211b1a3bf7cd93c7e0738779e0a7f6ff99e15602b027e758" Dec 05 06:19:11 crc kubenswrapper[4742]: I1205 06:19:11.344134 4742 scope.go:117] "RemoveContainer" containerID="f1062878d0619a2d1539b770dcb9bb0c2fd7ff348328e25b5c911ce879853948" Dec 05 06:19:11 crc 
kubenswrapper[4742]: I1205 06:19:11.418424 4742 scope.go:117] "RemoveContainer" containerID="40b5ce673e98b0c1b9c40ba9310397a6bb881d9fcfffb97ec613ab9a221811aa" Dec 05 06:19:11 crc kubenswrapper[4742]: I1205 06:19:11.449475 4742 scope.go:117] "RemoveContainer" containerID="4ccf56ad8bfa048ecc6e15151ff71aa24d402ac41a1188da330cfb7757a8cfe6" Dec 05 06:19:11 crc kubenswrapper[4742]: I1205 06:19:11.485622 4742 scope.go:117] "RemoveContainer" containerID="071226b7533691457a48d507fb4948e3f54cda1b60d42e10ff66b88d8eeb9aed" Dec 05 06:19:11 crc kubenswrapper[4742]: I1205 06:19:11.514479 4742 scope.go:117] "RemoveContainer" containerID="c642c9fed985ae27dd4b8a218a8892298ed31e277a17c1b94509b1c0f51a2fb9" Dec 05 06:19:11 crc kubenswrapper[4742]: I1205 06:19:11.547781 4742 scope.go:117] "RemoveContainer" containerID="f581ecbf186f74f8dbb952b910932bfb53c37263797de4787a69cb0e9a0b066d" Dec 05 06:19:11 crc kubenswrapper[4742]: I1205 06:19:11.589769 4742 scope.go:117] "RemoveContainer" containerID="305cc444eeb82b0e93ef7fa69f0ca21257483b190f06e98feae111a5d68245ed" Dec 05 06:19:11 crc kubenswrapper[4742]: I1205 06:19:11.618921 4742 scope.go:117] "RemoveContainer" containerID="15f60914af2112465ebf23d1b3f7799dd5741321a833d188d7b7a0ab32e96194" Dec 05 06:19:16 crc kubenswrapper[4742]: I1205 06:19:16.383817 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:19:16 crc kubenswrapper[4742]: E1205 06:19:16.384497 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:19:31 crc kubenswrapper[4742]: I1205 06:19:31.383678 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:19:31 crc kubenswrapper[4742]: E1205 06:19:31.384821 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:19:46 crc kubenswrapper[4742]: I1205 06:19:46.382848 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:19:46 crc kubenswrapper[4742]: E1205 06:19:46.384129 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:20:01 crc kubenswrapper[4742]: I1205 06:20:01.383262 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:20:01 crc kubenswrapper[4742]: E1205 06:20:01.384419 4742 pod_workers.go:1301] "Error syncing pod, skipping" 
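
The repeating "Error syncing pod, skipping ... CrashLoopBackOff: back-off 5m0s" entries show the sync loop re-evaluating the pod every few seconds while refusing to restart the container until the back-off window closes. The 5m cap is stated by the log itself; the 10s base and doubling below are assumptions about kubelet defaults, so treat this as a sketch of the shape of the policy rather than its source:

package main

import (
	"fmt"
	"time"
)

// backoff returns the assumed restart delay after n failed restarts:
// exponential from a 10s base, capped at the 5m0s the log reports.
func backoff(n int) time.Duration {
	d := 10 * time.Second // assumed initial delay
	for i := 0; i < n; i++ {
		d *= 2
		if d >= 5*time.Minute {
			return 5 * time.Minute // "back-off 5m0s" in the log
		}
	}
	return d
}

func main() {
	for n := 0; n <= 6; n++ {
		fmt.Printf("failures=%d wait=%v\n", n, backoff(n))
	}
}

After five or so failures the delay pins at the cap, which is why the refusals above recur for minutes until the 06:23:50 restart finally goes through.
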
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:20:11 crc kubenswrapper[4742]: I1205 06:20:11.848933 4742 scope.go:117] "RemoveContainer" containerID="e6ae9e5aa6d032b3ba8965356ae8b4ff4257705bc6f7ae6526108bfef473a057" Dec 05 06:20:11 crc kubenswrapper[4742]: I1205 06:20:11.911425 4742 scope.go:117] "RemoveContainer" containerID="c929e4d68a7edf82190727f895b54f4ccde921cfe66539dcbe06759346886432" Dec 05 06:20:11 crc kubenswrapper[4742]: I1205 06:20:11.946187 4742 scope.go:117] "RemoveContainer" containerID="96d5fa9cee2959bc805b039bf843ed8935b507be63f1df35905b848a55c83d14" Dec 05 06:20:11 crc kubenswrapper[4742]: I1205 06:20:11.999153 4742 scope.go:117] "RemoveContainer" containerID="85c2b0d2bbfb8b6e3c396234c6a2e7332b515e33d6f3309f0bbf9466f03f62a0" Dec 05 06:20:15 crc kubenswrapper[4742]: I1205 06:20:15.384632 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:20:15 crc kubenswrapper[4742]: E1205 06:20:15.385796 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:20:30 crc kubenswrapper[4742]: I1205 06:20:30.384608 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:20:30 crc kubenswrapper[4742]: E1205 06:20:30.385700 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:20:45 crc kubenswrapper[4742]: I1205 06:20:45.382433 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:20:45 crc kubenswrapper[4742]: E1205 06:20:45.383204 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:20:58 crc kubenswrapper[4742]: I1205 06:20:58.383198 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:20:58 crc kubenswrapper[4742]: E1205 06:20:58.384052 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:21:11 crc kubenswrapper[4742]: I1205 06:21:11.383027 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:21:11 crc kubenswrapper[4742]: E1205 06:21:11.384273 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:21:12 crc kubenswrapper[4742]: I1205 06:21:12.113261 4742 scope.go:117] "RemoveContainer" containerID="3658c165323def6855dfad073a0557e3f845baf13e8fbe77d9212b3a4c2386aa" Dec 05 06:21:22 crc kubenswrapper[4742]: I1205 06:21:22.383877 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:21:22 crc kubenswrapper[4742]: E1205 06:21:22.385182 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:21:34 crc kubenswrapper[4742]: I1205 06:21:34.389851 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:21:34 crc kubenswrapper[4742]: E1205 06:21:34.390920 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:21:46 crc kubenswrapper[4742]: I1205 06:21:46.385329 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:21:46 crc kubenswrapper[4742]: E1205 06:21:46.386574 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:22:02 crc kubenswrapper[4742]: I1205 06:22:02.384142 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:22:02 crc kubenswrapper[4742]: E1205 06:22:02.384965 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:22:12 crc kubenswrapper[4742]: I1205 06:22:12.207994 4742 scope.go:117] "RemoveContainer" containerID="5a696de0ae41e7833ee83f7613624a0e873d7ff3c9d51fceb0d56ed64b7f8f9d" Dec 05 06:22:12 crc kubenswrapper[4742]: I1205 06:22:12.232009 4742 scope.go:117] "RemoveContainer" containerID="409654510ed79dcada2dee8cd274d71ff06fbb890d292b51685c3f9fa3bb7761" Dec 05 06:22:12 crc kubenswrapper[4742]: I1205 06:22:12.262160 4742 scope.go:117] "RemoveContainer" containerID="3840798ac23a9b0863d3c14f3c0de6637145f225e4b32a1a83018f90c174fe6e" Dec 05 06:22:12 crc kubenswrapper[4742]: I1205 06:22:12.290117 4742 scope.go:117] "RemoveContainer" containerID="283b7e47966c3e7e48227899036a6b2462d6b7f1bae3051a2674f4656193492e" Dec 05 06:22:12 crc kubenswrapper[4742]: I1205 06:22:12.323301 4742 scope.go:117] "RemoveContainer" containerID="ff2c23489eeb12a736b3cfe4f0639cf009f56c7dbb016087059c4fd472b82f8d" Dec 05 06:22:15 crc kubenswrapper[4742]: I1205 06:22:15.382977 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:22:15 crc kubenswrapper[4742]: E1205 06:22:15.383798 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:22:29 crc kubenswrapper[4742]: I1205 06:22:29.383221 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:22:29 crc kubenswrapper[4742]: E1205 06:22:29.384181 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.305027 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gz567"] Dec 05 06:22:35 crc kubenswrapper[4742]: E1205 06:22:35.305931 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f2fbe0f-30b1-432d-b029-ea7c20b064b7" containerName="extract-utilities" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.305966 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f2fbe0f-30b1-432d-b029-ea7c20b064b7" containerName="extract-utilities" Dec 05 06:22:35 crc kubenswrapper[4742]: E1205 06:22:35.305994 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f2fbe0f-30b1-432d-b029-ea7c20b064b7" containerName="extract-content" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.306006 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f2fbe0f-30b1-432d-b029-ea7c20b064b7" containerName="extract-content" Dec 05 06:22:35 crc kubenswrapper[4742]: E1205 06:22:35.306028 4742 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="1f2fbe0f-30b1-432d-b029-ea7c20b064b7" containerName="registry-server" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.306038 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f2fbe0f-30b1-432d-b029-ea7c20b064b7" containerName="registry-server" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.306375 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f2fbe0f-30b1-432d-b029-ea7c20b064b7" containerName="registry-server" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.308024 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.321652 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gz567"] Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.446469 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5kgz\" (UniqueName: \"kubernetes.io/projected/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-kube-api-access-f5kgz\") pod \"certified-operators-gz567\" (UID: \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\") " pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.446538 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-catalog-content\") pod \"certified-operators-gz567\" (UID: \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\") " pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.446878 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-utilities\") pod \"certified-operators-gz567\" (UID: \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\") " pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.547863 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5kgz\" (UniqueName: \"kubernetes.io/projected/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-kube-api-access-f5kgz\") pod \"certified-operators-gz567\" (UID: \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\") " pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.548371 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-catalog-content\") pod \"certified-operators-gz567\" (UID: \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\") " pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.548902 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-catalog-content\") pod \"certified-operators-gz567\" (UID: \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\") " pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.549103 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-utilities\") pod \"certified-operators-gz567\" (UID: \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\") " pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.549420 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-utilities\") pod \"certified-operators-gz567\" (UID: \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\") " pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.590935 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5kgz\" (UniqueName: \"kubernetes.io/projected/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-kube-api-access-f5kgz\") pod \"certified-operators-gz567\" (UID: \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\") " pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:35 crc kubenswrapper[4742]: I1205 06:22:35.642924 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:36 crc kubenswrapper[4742]: I1205 06:22:36.096255 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gz567"] Dec 05 06:22:36 crc kubenswrapper[4742]: I1205 06:22:36.821097 4742 generic.go:334] "Generic (PLEG): container finished" podID="3495a277-21fc-4ae0-a2c6-ddfe824ee90d" containerID="6aba5eeaa732acfcfe32608adb72ab44ee27adce5a5ba13ec185739bdc3e6131" exitCode=0 Dec 05 06:22:36 crc kubenswrapper[4742]: I1205 06:22:36.821270 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz567" event={"ID":"3495a277-21fc-4ae0-a2c6-ddfe824ee90d","Type":"ContainerDied","Data":"6aba5eeaa732acfcfe32608adb72ab44ee27adce5a5ba13ec185739bdc3e6131"} Dec 05 06:22:36 crc kubenswrapper[4742]: I1205 06:22:36.821474 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz567" event={"ID":"3495a277-21fc-4ae0-a2c6-ddfe824ee90d","Type":"ContainerStarted","Data":"03689c80b3421b3bf6e4059800d0c5f6e5c5fadc8b4530b0a7c6b77a41b643c8"} Dec 05 06:22:36 crc kubenswrapper[4742]: I1205 06:22:36.823780 4742 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 06:22:38 crc kubenswrapper[4742]: I1205 06:22:38.849723 4742 generic.go:334] "Generic (PLEG): container finished" podID="3495a277-21fc-4ae0-a2c6-ddfe824ee90d" containerID="6bd6a452c7bc3769dd53de28b226010899e4edda99b2b88f77a7ac54740c1000" exitCode=0 Dec 05 06:22:38 crc kubenswrapper[4742]: I1205 06:22:38.849791 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz567" event={"ID":"3495a277-21fc-4ae0-a2c6-ddfe824ee90d","Type":"ContainerDied","Data":"6bd6a452c7bc3769dd53de28b226010899e4edda99b2b88f77a7ac54740c1000"} Dec 05 06:22:39 crc kubenswrapper[4742]: I1205 06:22:39.865423 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz567" event={"ID":"3495a277-21fc-4ae0-a2c6-ddfe824ee90d","Type":"ContainerStarted","Data":"e6fbb366754efb60995c2872f203817c6579cff145e83bc743ccad63385f011c"} Dec 05 06:22:39 crc kubenswrapper[4742]: I1205 06:22:39.900607 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gz567" podStartSLOduration=2.469115648 
podStartE2EDuration="4.9005853s" podCreationTimestamp="2025-12-05 06:22:35 +0000 UTC" firstStartedPulling="2025-12-05 06:22:36.82341683 +0000 UTC m=+1832.735551902" lastFinishedPulling="2025-12-05 06:22:39.254886472 +0000 UTC m=+1835.167021554" observedRunningTime="2025-12-05 06:22:39.893732497 +0000 UTC m=+1835.805867629" watchObservedRunningTime="2025-12-05 06:22:39.9005853 +0000 UTC m=+1835.812720372" Dec 05 06:22:43 crc kubenswrapper[4742]: I1205 06:22:43.383264 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:22:43 crc kubenswrapper[4742]: E1205 06:22:43.383914 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:22:45 crc kubenswrapper[4742]: I1205 06:22:45.643695 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:45 crc kubenswrapper[4742]: I1205 06:22:45.644133 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:45 crc kubenswrapper[4742]: I1205 06:22:45.719818 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:45 crc kubenswrapper[4742]: I1205 06:22:45.968982 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:46 crc kubenswrapper[4742]: I1205 06:22:46.038428 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gz567"] Dec 05 06:22:47 crc kubenswrapper[4742]: I1205 06:22:47.945159 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gz567" podUID="3495a277-21fc-4ae0-a2c6-ddfe824ee90d" containerName="registry-server" containerID="cri-o://e6fbb366754efb60995c2872f203817c6579cff145e83bc743ccad63385f011c" gracePeriod=2 Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.377517 4742 util.go:48] "No ready sandbox for pod can be found. 
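
"Killing container with a grace period ... gracePeriod=2" above is the standard two-phase stop: ask politely, then force after the deadline. A rough model using a local process in place of a CRI call; SIGTERM/Wait/Kill stand in for the runtime's stop semantics, and none of this is kubelet code:

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// stopWithGrace sends SIGTERM, waits up to grace, then kills, mirroring the
// gracePeriod handling visible in the log.
func stopWithGrace(cmd *exec.Cmd, grace time.Duration) error {
	_ = cmd.Process.Signal(syscall.SIGTERM)
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case err := <-done:
		// Exited within the grace period; the log shows registry-server
		// doing exactly this (ContainerDied with exitCode=0).
		return err
	case <-time.After(grace):
		return cmd.Process.Kill() // deadline passed; force-stop
	}
}

func main() {
	cmd := exec.Command("sleep", "30")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	fmt.Println(stopWithGrace(cmd, 2*time.Second)) // gracePeriod=2, as above
}

The machine-config-daemon kills earlier in the log use gracePeriod=600 with the same mechanism, just a longer deadline.
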
Need to start a new one" pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.505733 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5kgz\" (UniqueName: \"kubernetes.io/projected/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-kube-api-access-f5kgz\") pod \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\" (UID: \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\") " Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.505832 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-utilities\") pod \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\" (UID: \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\") " Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.505945 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-catalog-content\") pod \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\" (UID: \"3495a277-21fc-4ae0-a2c6-ddfe824ee90d\") " Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.507241 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-utilities" (OuterVolumeSpecName: "utilities") pod "3495a277-21fc-4ae0-a2c6-ddfe824ee90d" (UID: "3495a277-21fc-4ae0-a2c6-ddfe824ee90d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.513370 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-kube-api-access-f5kgz" (OuterVolumeSpecName: "kube-api-access-f5kgz") pod "3495a277-21fc-4ae0-a2c6-ddfe824ee90d" (UID: "3495a277-21fc-4ae0-a2c6-ddfe824ee90d"). InnerVolumeSpecName "kube-api-access-f5kgz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.568953 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3495a277-21fc-4ae0-a2c6-ddfe824ee90d" (UID: "3495a277-21fc-4ae0-a2c6-ddfe824ee90d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.607619 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.607649 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5kgz\" (UniqueName: \"kubernetes.io/projected/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-kube-api-access-f5kgz\") on node \"crc\" DevicePath \"\"" Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.607663 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3495a277-21fc-4ae0-a2c6-ddfe824ee90d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.954996 4742 generic.go:334] "Generic (PLEG): container finished" podID="3495a277-21fc-4ae0-a2c6-ddfe824ee90d" containerID="e6fbb366754efb60995c2872f203817c6579cff145e83bc743ccad63385f011c" exitCode=0 Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.955082 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz567" event={"ID":"3495a277-21fc-4ae0-a2c6-ddfe824ee90d","Type":"ContainerDied","Data":"e6fbb366754efb60995c2872f203817c6579cff145e83bc743ccad63385f011c"} Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.955125 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gz567" event={"ID":"3495a277-21fc-4ae0-a2c6-ddfe824ee90d","Type":"ContainerDied","Data":"03689c80b3421b3bf6e4059800d0c5f6e5c5fadc8b4530b0a7c6b77a41b643c8"} Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.955146 4742 scope.go:117] "RemoveContainer" containerID="e6fbb366754efb60995c2872f203817c6579cff145e83bc743ccad63385f011c" Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.955152 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gz567" Dec 05 06:22:48 crc kubenswrapper[4742]: I1205 06:22:48.985042 4742 scope.go:117] "RemoveContainer" containerID="6bd6a452c7bc3769dd53de28b226010899e4edda99b2b88f77a7ac54740c1000" Dec 05 06:22:49 crc kubenswrapper[4742]: I1205 06:22:49.000676 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gz567"] Dec 05 06:22:49 crc kubenswrapper[4742]: I1205 06:22:49.005562 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gz567"] Dec 05 06:22:49 crc kubenswrapper[4742]: I1205 06:22:49.021095 4742 scope.go:117] "RemoveContainer" containerID="6aba5eeaa732acfcfe32608adb72ab44ee27adce5a5ba13ec185739bdc3e6131" Dec 05 06:22:49 crc kubenswrapper[4742]: I1205 06:22:49.038620 4742 scope.go:117] "RemoveContainer" containerID="e6fbb366754efb60995c2872f203817c6579cff145e83bc743ccad63385f011c" Dec 05 06:22:49 crc kubenswrapper[4742]: E1205 06:22:49.038915 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6fbb366754efb60995c2872f203817c6579cff145e83bc743ccad63385f011c\": container with ID starting with e6fbb366754efb60995c2872f203817c6579cff145e83bc743ccad63385f011c not found: ID does not exist" containerID="e6fbb366754efb60995c2872f203817c6579cff145e83bc743ccad63385f011c" Dec 05 06:22:49 crc kubenswrapper[4742]: I1205 06:22:49.038946 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6fbb366754efb60995c2872f203817c6579cff145e83bc743ccad63385f011c"} err="failed to get container status \"e6fbb366754efb60995c2872f203817c6579cff145e83bc743ccad63385f011c\": rpc error: code = NotFound desc = could not find container \"e6fbb366754efb60995c2872f203817c6579cff145e83bc743ccad63385f011c\": container with ID starting with e6fbb366754efb60995c2872f203817c6579cff145e83bc743ccad63385f011c not found: ID does not exist" Dec 05 06:22:49 crc kubenswrapper[4742]: I1205 06:22:49.038968 4742 scope.go:117] "RemoveContainer" containerID="6bd6a452c7bc3769dd53de28b226010899e4edda99b2b88f77a7ac54740c1000" Dec 05 06:22:49 crc kubenswrapper[4742]: E1205 06:22:49.039241 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bd6a452c7bc3769dd53de28b226010899e4edda99b2b88f77a7ac54740c1000\": container with ID starting with 6bd6a452c7bc3769dd53de28b226010899e4edda99b2b88f77a7ac54740c1000 not found: ID does not exist" containerID="6bd6a452c7bc3769dd53de28b226010899e4edda99b2b88f77a7ac54740c1000" Dec 05 06:22:49 crc kubenswrapper[4742]: I1205 06:22:49.039261 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bd6a452c7bc3769dd53de28b226010899e4edda99b2b88f77a7ac54740c1000"} err="failed to get container status \"6bd6a452c7bc3769dd53de28b226010899e4edda99b2b88f77a7ac54740c1000\": rpc error: code = NotFound desc = could not find container \"6bd6a452c7bc3769dd53de28b226010899e4edda99b2b88f77a7ac54740c1000\": container with ID starting with 6bd6a452c7bc3769dd53de28b226010899e4edda99b2b88f77a7ac54740c1000 not found: ID does not exist" Dec 05 06:22:49 crc kubenswrapper[4742]: I1205 06:22:49.039275 4742 scope.go:117] "RemoveContainer" containerID="6aba5eeaa732acfcfe32608adb72ab44ee27adce5a5ba13ec185739bdc3e6131" Dec 05 06:22:49 crc kubenswrapper[4742]: E1205 06:22:49.039553 4742 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"6aba5eeaa732acfcfe32608adb72ab44ee27adce5a5ba13ec185739bdc3e6131\": container with ID starting with 6aba5eeaa732acfcfe32608adb72ab44ee27adce5a5ba13ec185739bdc3e6131 not found: ID does not exist" containerID="6aba5eeaa732acfcfe32608adb72ab44ee27adce5a5ba13ec185739bdc3e6131" Dec 05 06:22:49 crc kubenswrapper[4742]: I1205 06:22:49.039600 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6aba5eeaa732acfcfe32608adb72ab44ee27adce5a5ba13ec185739bdc3e6131"} err="failed to get container status \"6aba5eeaa732acfcfe32608adb72ab44ee27adce5a5ba13ec185739bdc3e6131\": rpc error: code = NotFound desc = could not find container \"6aba5eeaa732acfcfe32608adb72ab44ee27adce5a5ba13ec185739bdc3e6131\": container with ID starting with 6aba5eeaa732acfcfe32608adb72ab44ee27adce5a5ba13ec185739bdc3e6131 not found: ID does not exist" Dec 05 06:22:50 crc kubenswrapper[4742]: I1205 06:22:50.395049 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3495a277-21fc-4ae0-a2c6-ddfe824ee90d" path="/var/lib/kubelet/pods/3495a277-21fc-4ae0-a2c6-ddfe824ee90d/volumes" Dec 05 06:22:55 crc kubenswrapper[4742]: I1205 06:22:55.383123 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:22:55 crc kubenswrapper[4742]: E1205 06:22:55.383776 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:23:08 crc kubenswrapper[4742]: I1205 06:23:08.382275 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:23:08 crc kubenswrapper[4742]: E1205 06:23:08.383079 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:23:21 crc kubenswrapper[4742]: I1205 06:23:21.382366 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:23:21 crc kubenswrapper[4742]: E1205 06:23:21.383361 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:23:35 crc kubenswrapper[4742]: I1205 06:23:35.384083 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:23:35 crc kubenswrapper[4742]: E1205 06:23:35.384862 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:23:50 crc kubenswrapper[4742]: I1205 06:23:50.383163 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:23:51 crc kubenswrapper[4742]: I1205 06:23:51.559260 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"12fb54377caf5ec8b430c9e38975da830717543ac04874595b16f5a28eb29666"} Dec 05 06:26:16 crc kubenswrapper[4742]: I1205 06:26:16.672081 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:26:16 crc kubenswrapper[4742]: I1205 06:26:16.672819 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:26:46 crc kubenswrapper[4742]: I1205 06:26:46.671625 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:26:46 crc kubenswrapper[4742]: I1205 06:26:46.672266 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:27:16 crc kubenswrapper[4742]: I1205 06:27:16.671712 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:27:16 crc kubenswrapper[4742]: I1205 06:27:16.672491 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:27:16 crc kubenswrapper[4742]: I1205 06:27:16.672554 4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 06:27:16 crc kubenswrapper[4742]: I1205 06:27:16.673423 4742 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"12fb54377caf5ec8b430c9e38975da830717543ac04874595b16f5a28eb29666"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:27:16 crc kubenswrapper[4742]: I1205 06:27:16.673553 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://12fb54377caf5ec8b430c9e38975da830717543ac04874595b16f5a28eb29666" gracePeriod=600 Dec 05 06:27:17 crc kubenswrapper[4742]: I1205 06:27:17.524755 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="12fb54377caf5ec8b430c9e38975da830717543ac04874595b16f5a28eb29666" exitCode=0 Dec 05 06:27:17 crc kubenswrapper[4742]: I1205 06:27:17.524804 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"12fb54377caf5ec8b430c9e38975da830717543ac04874595b16f5a28eb29666"} Dec 05 06:27:17 crc kubenswrapper[4742]: I1205 06:27:17.525106 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6"} Dec 05 06:27:17 crc kubenswrapper[4742]: I1205 06:27:17.525122 4742 scope.go:117] "RemoveContainer" containerID="acfdc7839036ec5564d1440bf9182e5fc60d12c3795fe108a67d63d46cc78c7b" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.340745 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-g7nwd"] Dec 05 06:29:01 crc kubenswrapper[4742]: E1205 06:29:01.341754 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3495a277-21fc-4ae0-a2c6-ddfe824ee90d" containerName="extract-content" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.341774 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="3495a277-21fc-4ae0-a2c6-ddfe824ee90d" containerName="extract-content" Dec 05 06:29:01 crc kubenswrapper[4742]: E1205 06:29:01.341800 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3495a277-21fc-4ae0-a2c6-ddfe824ee90d" containerName="registry-server" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.341811 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="3495a277-21fc-4ae0-a2c6-ddfe824ee90d" containerName="registry-server" Dec 05 06:29:01 crc kubenswrapper[4742]: E1205 06:29:01.341827 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3495a277-21fc-4ae0-a2c6-ddfe824ee90d" containerName="extract-utilities" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.341838 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="3495a277-21fc-4ae0-a2c6-ddfe824ee90d" containerName="extract-utilities" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.342112 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="3495a277-21fc-4ae0-a2c6-ddfe824ee90d" containerName="registry-server" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.343795 4742 util.go:30] "No sandbox for pod can be found. 
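
The 06:26-06:27 liveness failures above repeat the earlier pattern: an HTTP GET against http://127.0.0.1:8798/health is refused while machine-config-daemon is down, and after three consecutive failures 30s apart the kubelet kills the container (gracePeriod=600) and restarts it. A tiny probe-shaped check against the same endpoint; the 1s timeout is an assumption, not the probe's configured value:

package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get("http://127.0.0.1:8798/health")
	if err != nil {
		// With nothing listening this prints the same "connect: connection
		// refused" seen in the probe output above.
		fmt.Println("probe failure:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("probe status:", resp.Status)
}
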
Need to start a new one" pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.362559 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g7nwd"] Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.438842 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43a8a280-2785-4b8e-b657-2fdb94415c41-utilities\") pod \"community-operators-g7nwd\" (UID: \"43a8a280-2785-4b8e-b657-2fdb94415c41\") " pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.438917 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hczdg\" (UniqueName: \"kubernetes.io/projected/43a8a280-2785-4b8e-b657-2fdb94415c41-kube-api-access-hczdg\") pod \"community-operators-g7nwd\" (UID: \"43a8a280-2785-4b8e-b657-2fdb94415c41\") " pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.438962 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43a8a280-2785-4b8e-b657-2fdb94415c41-catalog-content\") pod \"community-operators-g7nwd\" (UID: \"43a8a280-2785-4b8e-b657-2fdb94415c41\") " pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.540224 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43a8a280-2785-4b8e-b657-2fdb94415c41-utilities\") pod \"community-operators-g7nwd\" (UID: \"43a8a280-2785-4b8e-b657-2fdb94415c41\") " pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.540319 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hczdg\" (UniqueName: \"kubernetes.io/projected/43a8a280-2785-4b8e-b657-2fdb94415c41-kube-api-access-hczdg\") pod \"community-operators-g7nwd\" (UID: \"43a8a280-2785-4b8e-b657-2fdb94415c41\") " pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.540387 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43a8a280-2785-4b8e-b657-2fdb94415c41-catalog-content\") pod \"community-operators-g7nwd\" (UID: \"43a8a280-2785-4b8e-b657-2fdb94415c41\") " pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.542386 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43a8a280-2785-4b8e-b657-2fdb94415c41-catalog-content\") pod \"community-operators-g7nwd\" (UID: \"43a8a280-2785-4b8e-b657-2fdb94415c41\") " pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.542386 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43a8a280-2785-4b8e-b657-2fdb94415c41-utilities\") pod \"community-operators-g7nwd\" (UID: \"43a8a280-2785-4b8e-b657-2fdb94415c41\") " pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.560511 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hczdg\" (UniqueName: \"kubernetes.io/projected/43a8a280-2785-4b8e-b657-2fdb94415c41-kube-api-access-hczdg\") pod \"community-operators-g7nwd\" (UID: \"43a8a280-2785-4b8e-b657-2fdb94415c41\") " pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:01 crc kubenswrapper[4742]: I1205 06:29:01.665684 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:02 crc kubenswrapper[4742]: I1205 06:29:02.200738 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-g7nwd"] Dec 05 06:29:02 crc kubenswrapper[4742]: I1205 06:29:02.492692 4742 generic.go:334] "Generic (PLEG): container finished" podID="43a8a280-2785-4b8e-b657-2fdb94415c41" containerID="eaa46a39eb0daf8828ef25357b99fba62f611977d2167063177bb87dbecdf7eb" exitCode=0 Dec 05 06:29:02 crc kubenswrapper[4742]: I1205 06:29:02.492823 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g7nwd" event={"ID":"43a8a280-2785-4b8e-b657-2fdb94415c41","Type":"ContainerDied","Data":"eaa46a39eb0daf8828ef25357b99fba62f611977d2167063177bb87dbecdf7eb"} Dec 05 06:29:02 crc kubenswrapper[4742]: I1205 06:29:02.493045 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g7nwd" event={"ID":"43a8a280-2785-4b8e-b657-2fdb94415c41","Type":"ContainerStarted","Data":"b5dc97f01f69d40abc23707e9038c66b897bf052f08f1bcb4b3caed23651454e"} Dec 05 06:29:02 crc kubenswrapper[4742]: I1205 06:29:02.495243 4742 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 06:29:03 crc kubenswrapper[4742]: I1205 06:29:03.503980 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g7nwd" event={"ID":"43a8a280-2785-4b8e-b657-2fdb94415c41","Type":"ContainerStarted","Data":"c7ad2e491f177d0698c6a6dbc161c898c62588d93ba6486d1c6deedc48b3a0f3"} Dec 05 06:29:03 crc kubenswrapper[4742]: I1205 06:29:03.950313 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5bmqk"] Dec 05 06:29:03 crc kubenswrapper[4742]: I1205 06:29:03.953032 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:03 crc kubenswrapper[4742]: I1205 06:29:03.961565 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5bmqk"] Dec 05 06:29:04 crc kubenswrapper[4742]: I1205 06:29:04.100677 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96lql\" (UniqueName: \"kubernetes.io/projected/b01d4abd-328c-4918-9449-172bf6aeef4b-kube-api-access-96lql\") pod \"redhat-marketplace-5bmqk\" (UID: \"b01d4abd-328c-4918-9449-172bf6aeef4b\") " pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:04 crc kubenswrapper[4742]: I1205 06:29:04.100746 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b01d4abd-328c-4918-9449-172bf6aeef4b-utilities\") pod \"redhat-marketplace-5bmqk\" (UID: \"b01d4abd-328c-4918-9449-172bf6aeef4b\") " pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:04 crc kubenswrapper[4742]: I1205 06:29:04.100871 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b01d4abd-328c-4918-9449-172bf6aeef4b-catalog-content\") pod \"redhat-marketplace-5bmqk\" (UID: \"b01d4abd-328c-4918-9449-172bf6aeef4b\") " pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:04 crc kubenswrapper[4742]: I1205 06:29:04.202347 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96lql\" (UniqueName: \"kubernetes.io/projected/b01d4abd-328c-4918-9449-172bf6aeef4b-kube-api-access-96lql\") pod \"redhat-marketplace-5bmqk\" (UID: \"b01d4abd-328c-4918-9449-172bf6aeef4b\") " pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:04 crc kubenswrapper[4742]: I1205 06:29:04.202428 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b01d4abd-328c-4918-9449-172bf6aeef4b-utilities\") pod \"redhat-marketplace-5bmqk\" (UID: \"b01d4abd-328c-4918-9449-172bf6aeef4b\") " pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:04 crc kubenswrapper[4742]: I1205 06:29:04.202515 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b01d4abd-328c-4918-9449-172bf6aeef4b-catalog-content\") pod \"redhat-marketplace-5bmqk\" (UID: \"b01d4abd-328c-4918-9449-172bf6aeef4b\") " pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:04 crc kubenswrapper[4742]: I1205 06:29:04.203124 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b01d4abd-328c-4918-9449-172bf6aeef4b-utilities\") pod \"redhat-marketplace-5bmqk\" (UID: \"b01d4abd-328c-4918-9449-172bf6aeef4b\") " pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:04 crc kubenswrapper[4742]: I1205 06:29:04.203328 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b01d4abd-328c-4918-9449-172bf6aeef4b-catalog-content\") pod \"redhat-marketplace-5bmqk\" (UID: \"b01d4abd-328c-4918-9449-172bf6aeef4b\") " pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:04 crc kubenswrapper[4742]: I1205 06:29:04.225767 4742 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-96lql\" (UniqueName: \"kubernetes.io/projected/b01d4abd-328c-4918-9449-172bf6aeef4b-kube-api-access-96lql\") pod \"redhat-marketplace-5bmqk\" (UID: \"b01d4abd-328c-4918-9449-172bf6aeef4b\") " pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:04 crc kubenswrapper[4742]: I1205 06:29:04.292576 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:04 crc kubenswrapper[4742]: I1205 06:29:04.513565 4742 generic.go:334] "Generic (PLEG): container finished" podID="43a8a280-2785-4b8e-b657-2fdb94415c41" containerID="c7ad2e491f177d0698c6a6dbc161c898c62588d93ba6486d1c6deedc48b3a0f3" exitCode=0 Dec 05 06:29:04 crc kubenswrapper[4742]: I1205 06:29:04.513674 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g7nwd" event={"ID":"43a8a280-2785-4b8e-b657-2fdb94415c41","Type":"ContainerDied","Data":"c7ad2e491f177d0698c6a6dbc161c898c62588d93ba6486d1c6deedc48b3a0f3"} Dec 05 06:29:04 crc kubenswrapper[4742]: I1205 06:29:04.741692 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5bmqk"] Dec 05 06:29:05 crc kubenswrapper[4742]: I1205 06:29:05.532141 4742 generic.go:334] "Generic (PLEG): container finished" podID="b01d4abd-328c-4918-9449-172bf6aeef4b" containerID="72b4979fdaa1700cd4127bb769d21e6f54b4f766ec771459f2f0f07d9feb3dee" exitCode=0 Dec 05 06:29:05 crc kubenswrapper[4742]: I1205 06:29:05.533188 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5bmqk" event={"ID":"b01d4abd-328c-4918-9449-172bf6aeef4b","Type":"ContainerDied","Data":"72b4979fdaa1700cd4127bb769d21e6f54b4f766ec771459f2f0f07d9feb3dee"} Dec 05 06:29:05 crc kubenswrapper[4742]: I1205 06:29:05.533261 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5bmqk" event={"ID":"b01d4abd-328c-4918-9449-172bf6aeef4b","Type":"ContainerStarted","Data":"ecadf97b2bc395a18ab0a6b22504829a53e0aea2c4914b3f20c2590044c0a6a3"} Dec 05 06:29:05 crc kubenswrapper[4742]: I1205 06:29:05.542151 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g7nwd" event={"ID":"43a8a280-2785-4b8e-b657-2fdb94415c41","Type":"ContainerStarted","Data":"d1b84a117edb4944c8c0e6ff2e01b5807c293cba71e41883fa118f669be7dda4"} Dec 05 06:29:05 crc kubenswrapper[4742]: I1205 06:29:05.582503 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-g7nwd" podStartSLOduration=1.8927958120000001 podStartE2EDuration="4.582465768s" podCreationTimestamp="2025-12-05 06:29:01 +0000 UTC" firstStartedPulling="2025-12-05 06:29:02.494667021 +0000 UTC m=+2218.406802123" lastFinishedPulling="2025-12-05 06:29:05.184336997 +0000 UTC m=+2221.096472079" observedRunningTime="2025-12-05 06:29:05.581660416 +0000 UTC m=+2221.493795488" watchObservedRunningTime="2025-12-05 06:29:05.582465768 +0000 UTC m=+2221.494600870" Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.134910 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8bv59"] Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.137024 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.179818 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8bv59"] Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.237102 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r8ml\" (UniqueName: \"kubernetes.io/projected/69ffb1e0-463e-419d-b31a-a8f9481b4a60-kube-api-access-8r8ml\") pod \"redhat-operators-8bv59\" (UID: \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\") " pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.237156 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ffb1e0-463e-419d-b31a-a8f9481b4a60-catalog-content\") pod \"redhat-operators-8bv59\" (UID: \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\") " pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.237311 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ffb1e0-463e-419d-b31a-a8f9481b4a60-utilities\") pod \"redhat-operators-8bv59\" (UID: \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\") " pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.338475 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r8ml\" (UniqueName: \"kubernetes.io/projected/69ffb1e0-463e-419d-b31a-a8f9481b4a60-kube-api-access-8r8ml\") pod \"redhat-operators-8bv59\" (UID: \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\") " pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.338569 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ffb1e0-463e-419d-b31a-a8f9481b4a60-catalog-content\") pod \"redhat-operators-8bv59\" (UID: \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\") " pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.338633 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ffb1e0-463e-419d-b31a-a8f9481b4a60-utilities\") pod \"redhat-operators-8bv59\" (UID: \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\") " pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.339313 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ffb1e0-463e-419d-b31a-a8f9481b4a60-utilities\") pod \"redhat-operators-8bv59\" (UID: \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\") " pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.339433 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ffb1e0-463e-419d-b31a-a8f9481b4a60-catalog-content\") pod \"redhat-operators-8bv59\" (UID: \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\") " pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.372640 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-8r8ml\" (UniqueName: \"kubernetes.io/projected/69ffb1e0-463e-419d-b31a-a8f9481b4a60-kube-api-access-8r8ml\") pod \"redhat-operators-8bv59\" (UID: \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\") " pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.507633 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.560751 4742 generic.go:334] "Generic (PLEG): container finished" podID="b01d4abd-328c-4918-9449-172bf6aeef4b" containerID="1ecf3302b1084008782b07617222c0ae047816280e4c6b125cbae80042506854" exitCode=0 Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.560888 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5bmqk" event={"ID":"b01d4abd-328c-4918-9449-172bf6aeef4b","Type":"ContainerDied","Data":"1ecf3302b1084008782b07617222c0ae047816280e4c6b125cbae80042506854"} Dec 05 06:29:06 crc kubenswrapper[4742]: I1205 06:29:06.748894 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8bv59"] Dec 05 06:29:07 crc kubenswrapper[4742]: I1205 06:29:07.572569 4742 generic.go:334] "Generic (PLEG): container finished" podID="69ffb1e0-463e-419d-b31a-a8f9481b4a60" containerID="d29ccfe8f5f4824faa1590fea32b4097f1d740de5187665786b9b1ae2e360903" exitCode=0 Dec 05 06:29:07 crc kubenswrapper[4742]: I1205 06:29:07.572666 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8bv59" event={"ID":"69ffb1e0-463e-419d-b31a-a8f9481b4a60","Type":"ContainerDied","Data":"d29ccfe8f5f4824faa1590fea32b4097f1d740de5187665786b9b1ae2e360903"} Dec 05 06:29:07 crc kubenswrapper[4742]: I1205 06:29:07.572902 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8bv59" event={"ID":"69ffb1e0-463e-419d-b31a-a8f9481b4a60","Type":"ContainerStarted","Data":"7401bb14d69f9d981275c921af5ed9b2008617b9839692cc9bc11144ea1be6b0"} Dec 05 06:29:07 crc kubenswrapper[4742]: I1205 06:29:07.576730 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5bmqk" event={"ID":"b01d4abd-328c-4918-9449-172bf6aeef4b","Type":"ContainerStarted","Data":"cb67dc4f996240f29bfde6d1ff9c15493b22e936554270466c60c8aded3b86a5"} Dec 05 06:29:07 crc kubenswrapper[4742]: I1205 06:29:07.616003 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5bmqk" podStartSLOduration=3.178061015 podStartE2EDuration="4.615984447s" podCreationTimestamp="2025-12-05 06:29:03 +0000 UTC" firstStartedPulling="2025-12-05 06:29:05.537293827 +0000 UTC m=+2221.449428939" lastFinishedPulling="2025-12-05 06:29:06.975217309 +0000 UTC m=+2222.887352371" observedRunningTime="2025-12-05 06:29:07.611235021 +0000 UTC m=+2223.523370083" watchObservedRunningTime="2025-12-05 06:29:07.615984447 +0000 UTC m=+2223.528119509" Dec 05 06:29:08 crc kubenswrapper[4742]: I1205 06:29:08.588374 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8bv59" event={"ID":"69ffb1e0-463e-419d-b31a-a8f9481b4a60","Type":"ContainerStarted","Data":"aa916133425a3124bde8959af8c03f83fa622bba6790decf5b0bd4b1eb883b62"} Dec 05 06:29:09 crc kubenswrapper[4742]: I1205 06:29:09.600628 4742 generic.go:334] "Generic (PLEG): container finished" podID="69ffb1e0-463e-419d-b31a-a8f9481b4a60" 
containerID="aa916133425a3124bde8959af8c03f83fa622bba6790decf5b0bd4b1eb883b62" exitCode=0 Dec 05 06:29:09 crc kubenswrapper[4742]: I1205 06:29:09.600698 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8bv59" event={"ID":"69ffb1e0-463e-419d-b31a-a8f9481b4a60","Type":"ContainerDied","Data":"aa916133425a3124bde8959af8c03f83fa622bba6790decf5b0bd4b1eb883b62"} Dec 05 06:29:10 crc kubenswrapper[4742]: I1205 06:29:10.613245 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8bv59" event={"ID":"69ffb1e0-463e-419d-b31a-a8f9481b4a60","Type":"ContainerStarted","Data":"be228272a1fafd70ed739e2822f35389ebad48d70c86c779c32b2b185831bcf0"} Dec 05 06:29:10 crc kubenswrapper[4742]: I1205 06:29:10.651851 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8bv59" podStartSLOduration=2.205279666 podStartE2EDuration="4.651824983s" podCreationTimestamp="2025-12-05 06:29:06 +0000 UTC" firstStartedPulling="2025-12-05 06:29:07.574181126 +0000 UTC m=+2223.486316188" lastFinishedPulling="2025-12-05 06:29:10.020726443 +0000 UTC m=+2225.932861505" observedRunningTime="2025-12-05 06:29:10.640338188 +0000 UTC m=+2226.552473290" watchObservedRunningTime="2025-12-05 06:29:10.651824983 +0000 UTC m=+2226.563960085" Dec 05 06:29:11 crc kubenswrapper[4742]: I1205 06:29:11.666303 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:11 crc kubenswrapper[4742]: I1205 06:29:11.666432 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:11 crc kubenswrapper[4742]: I1205 06:29:11.707664 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:12 crc kubenswrapper[4742]: I1205 06:29:12.695488 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:14 crc kubenswrapper[4742]: I1205 06:29:14.293037 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:14 crc kubenswrapper[4742]: I1205 06:29:14.293365 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:14 crc kubenswrapper[4742]: I1205 06:29:14.321900 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g7nwd"] Dec 05 06:29:14 crc kubenswrapper[4742]: I1205 06:29:14.340511 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:14 crc kubenswrapper[4742]: I1205 06:29:14.693716 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:15 crc kubenswrapper[4742]: I1205 06:29:15.652715 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-g7nwd" podUID="43a8a280-2785-4b8e-b657-2fdb94415c41" containerName="registry-server" containerID="cri-o://d1b84a117edb4944c8c0e6ff2e01b5807c293cba71e41883fa118f669be7dda4" gracePeriod=2 Dec 05 06:29:15 crc kubenswrapper[4742]: I1205 06:29:15.737033 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-marketplace-5bmqk"] Dec 05 06:29:16 crc kubenswrapper[4742]: I1205 06:29:16.508259 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:16 crc kubenswrapper[4742]: I1205 06:29:16.510311 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:16 crc kubenswrapper[4742]: I1205 06:29:16.660299 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5bmqk" podUID="b01d4abd-328c-4918-9449-172bf6aeef4b" containerName="registry-server" containerID="cri-o://cb67dc4f996240f29bfde6d1ff9c15493b22e936554270466c60c8aded3b86a5" gracePeriod=2 Dec 05 06:29:16 crc kubenswrapper[4742]: I1205 06:29:16.671225 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:29:16 crc kubenswrapper[4742]: I1205 06:29:16.671314 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:29:17 crc kubenswrapper[4742]: I1205 06:29:17.573360 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8bv59" podUID="69ffb1e0-463e-419d-b31a-a8f9481b4a60" containerName="registry-server" probeResult="failure" output=< Dec 05 06:29:17 crc kubenswrapper[4742]: timeout: failed to connect service ":50051" within 1s Dec 05 06:29:17 crc kubenswrapper[4742]: > Dec 05 06:29:17 crc kubenswrapper[4742]: I1205 06:29:17.674736 4742 generic.go:334] "Generic (PLEG): container finished" podID="b01d4abd-328c-4918-9449-172bf6aeef4b" containerID="cb67dc4f996240f29bfde6d1ff9c15493b22e936554270466c60c8aded3b86a5" exitCode=0 Dec 05 06:29:17 crc kubenswrapper[4742]: I1205 06:29:17.674846 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5bmqk" event={"ID":"b01d4abd-328c-4918-9449-172bf6aeef4b","Type":"ContainerDied","Data":"cb67dc4f996240f29bfde6d1ff9c15493b22e936554270466c60c8aded3b86a5"} Dec 05 06:29:17 crc kubenswrapper[4742]: I1205 06:29:17.678868 4742 generic.go:334] "Generic (PLEG): container finished" podID="43a8a280-2785-4b8e-b657-2fdb94415c41" containerID="d1b84a117edb4944c8c0e6ff2e01b5807c293cba71e41883fa118f669be7dda4" exitCode=0 Dec 05 06:29:17 crc kubenswrapper[4742]: I1205 06:29:17.678942 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g7nwd" event={"ID":"43a8a280-2785-4b8e-b657-2fdb94415c41","Type":"ContainerDied","Data":"d1b84a117edb4944c8c0e6ff2e01b5807c293cba71e41883fa118f669be7dda4"} Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.053856 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.061785 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43a8a280-2785-4b8e-b657-2fdb94415c41-catalog-content\") pod \"43a8a280-2785-4b8e-b657-2fdb94415c41\" (UID: \"43a8a280-2785-4b8e-b657-2fdb94415c41\") " Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.061863 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43a8a280-2785-4b8e-b657-2fdb94415c41-utilities\") pod \"43a8a280-2785-4b8e-b657-2fdb94415c41\" (UID: \"43a8a280-2785-4b8e-b657-2fdb94415c41\") " Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.061974 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hczdg\" (UniqueName: \"kubernetes.io/projected/43a8a280-2785-4b8e-b657-2fdb94415c41-kube-api-access-hczdg\") pod \"43a8a280-2785-4b8e-b657-2fdb94415c41\" (UID: \"43a8a280-2785-4b8e-b657-2fdb94415c41\") " Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.067240 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43a8a280-2785-4b8e-b657-2fdb94415c41-utilities" (OuterVolumeSpecName: "utilities") pod "43a8a280-2785-4b8e-b657-2fdb94415c41" (UID: "43a8a280-2785-4b8e-b657-2fdb94415c41"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.074896 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43a8a280-2785-4b8e-b657-2fdb94415c41-kube-api-access-hczdg" (OuterVolumeSpecName: "kube-api-access-hczdg") pod "43a8a280-2785-4b8e-b657-2fdb94415c41" (UID: "43a8a280-2785-4b8e-b657-2fdb94415c41"). InnerVolumeSpecName "kube-api-access-hczdg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.163004 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43a8a280-2785-4b8e-b657-2fdb94415c41-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "43a8a280-2785-4b8e-b657-2fdb94415c41" (UID: "43a8a280-2785-4b8e-b657-2fdb94415c41"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.163884 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hczdg\" (UniqueName: \"kubernetes.io/projected/43a8a280-2785-4b8e-b657-2fdb94415c41-kube-api-access-hczdg\") on node \"crc\" DevicePath \"\"" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.163906 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43a8a280-2785-4b8e-b657-2fdb94415c41-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.163914 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43a8a280-2785-4b8e-b657-2fdb94415c41-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.231463 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.264800 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b01d4abd-328c-4918-9449-172bf6aeef4b-utilities\") pod \"b01d4abd-328c-4918-9449-172bf6aeef4b\" (UID: \"b01d4abd-328c-4918-9449-172bf6aeef4b\") " Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.264874 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b01d4abd-328c-4918-9449-172bf6aeef4b-catalog-content\") pod \"b01d4abd-328c-4918-9449-172bf6aeef4b\" (UID: \"b01d4abd-328c-4918-9449-172bf6aeef4b\") " Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.264928 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96lql\" (UniqueName: \"kubernetes.io/projected/b01d4abd-328c-4918-9449-172bf6aeef4b-kube-api-access-96lql\") pod \"b01d4abd-328c-4918-9449-172bf6aeef4b\" (UID: \"b01d4abd-328c-4918-9449-172bf6aeef4b\") " Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.268195 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b01d4abd-328c-4918-9449-172bf6aeef4b-kube-api-access-96lql" (OuterVolumeSpecName: "kube-api-access-96lql") pod "b01d4abd-328c-4918-9449-172bf6aeef4b" (UID: "b01d4abd-328c-4918-9449-172bf6aeef4b"). InnerVolumeSpecName "kube-api-access-96lql". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.269305 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b01d4abd-328c-4918-9449-172bf6aeef4b-utilities" (OuterVolumeSpecName: "utilities") pod "b01d4abd-328c-4918-9449-172bf6aeef4b" (UID: "b01d4abd-328c-4918-9449-172bf6aeef4b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.293505 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b01d4abd-328c-4918-9449-172bf6aeef4b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b01d4abd-328c-4918-9449-172bf6aeef4b" (UID: "b01d4abd-328c-4918-9449-172bf6aeef4b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.365741 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b01d4abd-328c-4918-9449-172bf6aeef4b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.365806 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b01d4abd-328c-4918-9449-172bf6aeef4b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.365830 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96lql\" (UniqueName: \"kubernetes.io/projected/b01d4abd-328c-4918-9449-172bf6aeef4b-kube-api-access-96lql\") on node \"crc\" DevicePath \"\"" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.693219 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-g7nwd" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.693049 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-g7nwd" event={"ID":"43a8a280-2785-4b8e-b657-2fdb94415c41","Type":"ContainerDied","Data":"b5dc97f01f69d40abc23707e9038c66b897bf052f08f1bcb4b3caed23651454e"} Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.693575 4742 scope.go:117] "RemoveContainer" containerID="d1b84a117edb4944c8c0e6ff2e01b5807c293cba71e41883fa118f669be7dda4" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.699840 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5bmqk" event={"ID":"b01d4abd-328c-4918-9449-172bf6aeef4b","Type":"ContainerDied","Data":"ecadf97b2bc395a18ab0a6b22504829a53e0aea2c4914b3f20c2590044c0a6a3"} Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.699932 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5bmqk" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.727276 4742 scope.go:117] "RemoveContainer" containerID="c7ad2e491f177d0698c6a6dbc161c898c62588d93ba6486d1c6deedc48b3a0f3" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.740287 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-g7nwd"] Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.751409 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-g7nwd"] Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.760651 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5bmqk"] Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.762349 4742 scope.go:117] "RemoveContainer" containerID="eaa46a39eb0daf8828ef25357b99fba62f611977d2167063177bb87dbecdf7eb" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.768546 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5bmqk"] Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.821168 4742 scope.go:117] "RemoveContainer" containerID="cb67dc4f996240f29bfde6d1ff9c15493b22e936554270466c60c8aded3b86a5" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.849187 4742 scope.go:117] "RemoveContainer" containerID="1ecf3302b1084008782b07617222c0ae047816280e4c6b125cbae80042506854" Dec 05 06:29:18 crc kubenswrapper[4742]: I1205 06:29:18.873032 4742 scope.go:117] "RemoveContainer" containerID="72b4979fdaa1700cd4127bb769d21e6f54b4f766ec771459f2f0f07d9feb3dee" Dec 05 06:29:20 crc kubenswrapper[4742]: I1205 06:29:20.392914 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43a8a280-2785-4b8e-b657-2fdb94415c41" path="/var/lib/kubelet/pods/43a8a280-2785-4b8e-b657-2fdb94415c41/volumes" Dec 05 06:29:20 crc kubenswrapper[4742]: I1205 06:29:20.394155 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b01d4abd-328c-4918-9449-172bf6aeef4b" path="/var/lib/kubelet/pods/b01d4abd-328c-4918-9449-172bf6aeef4b/volumes" Dec 05 06:29:26 crc kubenswrapper[4742]: I1205 06:29:26.594831 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:26 crc kubenswrapper[4742]: I1205 06:29:26.669827 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:26 crc kubenswrapper[4742]: I1205 06:29:26.838621 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8bv59"] Dec 05 06:29:27 crc kubenswrapper[4742]: I1205 06:29:27.798211 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8bv59" podUID="69ffb1e0-463e-419d-b31a-a8f9481b4a60" containerName="registry-server" containerID="cri-o://be228272a1fafd70ed739e2822f35389ebad48d70c86c779c32b2b185831bcf0" gracePeriod=2 Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.354719 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.542629 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8r8ml\" (UniqueName: \"kubernetes.io/projected/69ffb1e0-463e-419d-b31a-a8f9481b4a60-kube-api-access-8r8ml\") pod \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\" (UID: \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\") " Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.542699 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ffb1e0-463e-419d-b31a-a8f9481b4a60-utilities\") pod \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\" (UID: \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\") " Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.542760 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ffb1e0-463e-419d-b31a-a8f9481b4a60-catalog-content\") pod \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\" (UID: \"69ffb1e0-463e-419d-b31a-a8f9481b4a60\") " Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.543600 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69ffb1e0-463e-419d-b31a-a8f9481b4a60-utilities" (OuterVolumeSpecName: "utilities") pod "69ffb1e0-463e-419d-b31a-a8f9481b4a60" (UID: "69ffb1e0-463e-419d-b31a-a8f9481b4a60"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.550711 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69ffb1e0-463e-419d-b31a-a8f9481b4a60-kube-api-access-8r8ml" (OuterVolumeSpecName: "kube-api-access-8r8ml") pod "69ffb1e0-463e-419d-b31a-a8f9481b4a60" (UID: "69ffb1e0-463e-419d-b31a-a8f9481b4a60"). InnerVolumeSpecName "kube-api-access-8r8ml". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.644530 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8r8ml\" (UniqueName: \"kubernetes.io/projected/69ffb1e0-463e-419d-b31a-a8f9481b4a60-kube-api-access-8r8ml\") on node \"crc\" DevicePath \"\"" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.644578 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69ffb1e0-463e-419d-b31a-a8f9481b4a60-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.701708 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69ffb1e0-463e-419d-b31a-a8f9481b4a60-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "69ffb1e0-463e-419d-b31a-a8f9481b4a60" (UID: "69ffb1e0-463e-419d-b31a-a8f9481b4a60"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.746275 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69ffb1e0-463e-419d-b31a-a8f9481b4a60-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.815161 4742 generic.go:334] "Generic (PLEG): container finished" podID="69ffb1e0-463e-419d-b31a-a8f9481b4a60" containerID="be228272a1fafd70ed739e2822f35389ebad48d70c86c779c32b2b185831bcf0" exitCode=0 Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.815209 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8bv59" event={"ID":"69ffb1e0-463e-419d-b31a-a8f9481b4a60","Type":"ContainerDied","Data":"be228272a1fafd70ed739e2822f35389ebad48d70c86c779c32b2b185831bcf0"} Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.815239 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8bv59" event={"ID":"69ffb1e0-463e-419d-b31a-a8f9481b4a60","Type":"ContainerDied","Data":"7401bb14d69f9d981275c921af5ed9b2008617b9839692cc9bc11144ea1be6b0"} Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.815258 4742 scope.go:117] "RemoveContainer" containerID="be228272a1fafd70ed739e2822f35389ebad48d70c86c779c32b2b185831bcf0" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.815381 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8bv59" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.862629 4742 scope.go:117] "RemoveContainer" containerID="aa916133425a3124bde8959af8c03f83fa622bba6790decf5b0bd4b1eb883b62" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.865345 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8bv59"] Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.872355 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8bv59"] Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.886151 4742 scope.go:117] "RemoveContainer" containerID="d29ccfe8f5f4824faa1590fea32b4097f1d740de5187665786b9b1ae2e360903" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.934195 4742 scope.go:117] "RemoveContainer" containerID="be228272a1fafd70ed739e2822f35389ebad48d70c86c779c32b2b185831bcf0" Dec 05 06:29:28 crc kubenswrapper[4742]: E1205 06:29:28.935258 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be228272a1fafd70ed739e2822f35389ebad48d70c86c779c32b2b185831bcf0\": container with ID starting with be228272a1fafd70ed739e2822f35389ebad48d70c86c779c32b2b185831bcf0 not found: ID does not exist" containerID="be228272a1fafd70ed739e2822f35389ebad48d70c86c779c32b2b185831bcf0" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.935335 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be228272a1fafd70ed739e2822f35389ebad48d70c86c779c32b2b185831bcf0"} err="failed to get container status \"be228272a1fafd70ed739e2822f35389ebad48d70c86c779c32b2b185831bcf0\": rpc error: code = NotFound desc = could not find container \"be228272a1fafd70ed739e2822f35389ebad48d70c86c779c32b2b185831bcf0\": container with ID starting with be228272a1fafd70ed739e2822f35389ebad48d70c86c779c32b2b185831bcf0 not found: ID does not exist" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.935380 4742 scope.go:117] "RemoveContainer" containerID="aa916133425a3124bde8959af8c03f83fa622bba6790decf5b0bd4b1eb883b62" Dec 05 06:29:28 crc kubenswrapper[4742]: E1205 06:29:28.935855 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa916133425a3124bde8959af8c03f83fa622bba6790decf5b0bd4b1eb883b62\": container with ID starting with aa916133425a3124bde8959af8c03f83fa622bba6790decf5b0bd4b1eb883b62 not found: ID does not exist" containerID="aa916133425a3124bde8959af8c03f83fa622bba6790decf5b0bd4b1eb883b62" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.935928 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa916133425a3124bde8959af8c03f83fa622bba6790decf5b0bd4b1eb883b62"} err="failed to get container status \"aa916133425a3124bde8959af8c03f83fa622bba6790decf5b0bd4b1eb883b62\": rpc error: code = NotFound desc = could not find container \"aa916133425a3124bde8959af8c03f83fa622bba6790decf5b0bd4b1eb883b62\": container with ID starting with aa916133425a3124bde8959af8c03f83fa622bba6790decf5b0bd4b1eb883b62 not found: ID does not exist" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.935975 4742 scope.go:117] "RemoveContainer" containerID="d29ccfe8f5f4824faa1590fea32b4097f1d740de5187665786b9b1ae2e360903" Dec 05 06:29:28 crc kubenswrapper[4742]: E1205 06:29:28.936457 4742 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"d29ccfe8f5f4824faa1590fea32b4097f1d740de5187665786b9b1ae2e360903\": container with ID starting with d29ccfe8f5f4824faa1590fea32b4097f1d740de5187665786b9b1ae2e360903 not found: ID does not exist" containerID="d29ccfe8f5f4824faa1590fea32b4097f1d740de5187665786b9b1ae2e360903" Dec 05 06:29:28 crc kubenswrapper[4742]: I1205 06:29:28.936517 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d29ccfe8f5f4824faa1590fea32b4097f1d740de5187665786b9b1ae2e360903"} err="failed to get container status \"d29ccfe8f5f4824faa1590fea32b4097f1d740de5187665786b9b1ae2e360903\": rpc error: code = NotFound desc = could not find container \"d29ccfe8f5f4824faa1590fea32b4097f1d740de5187665786b9b1ae2e360903\": container with ID starting with d29ccfe8f5f4824faa1590fea32b4097f1d740de5187665786b9b1ae2e360903 not found: ID does not exist" Dec 05 06:29:30 crc kubenswrapper[4742]: I1205 06:29:30.408410 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69ffb1e0-463e-419d-b31a-a8f9481b4a60" path="/var/lib/kubelet/pods/69ffb1e0-463e-419d-b31a-a8f9481b4a60/volumes" Dec 05 06:29:46 crc kubenswrapper[4742]: I1205 06:29:46.671313 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:29:46 crc kubenswrapper[4742]: I1205 06:29:46.671893 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.175009 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq"] Dec 05 06:30:00 crc kubenswrapper[4742]: E1205 06:30:00.177874 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43a8a280-2785-4b8e-b657-2fdb94415c41" containerName="extract-utilities" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.178108 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="43a8a280-2785-4b8e-b657-2fdb94415c41" containerName="extract-utilities" Dec 05 06:30:00 crc kubenswrapper[4742]: E1205 06:30:00.178284 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b01d4abd-328c-4918-9449-172bf6aeef4b" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.178421 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="b01d4abd-328c-4918-9449-172bf6aeef4b" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4742]: E1205 06:30:00.178610 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b01d4abd-328c-4918-9449-172bf6aeef4b" containerName="extract-utilities" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.178760 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="b01d4abd-328c-4918-9449-172bf6aeef4b" containerName="extract-utilities" Dec 05 06:30:00 crc kubenswrapper[4742]: E1205 06:30:00.178894 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b01d4abd-328c-4918-9449-172bf6aeef4b" containerName="extract-content" Dec 05 06:30:00 crc 
kubenswrapper[4742]: I1205 06:30:00.179029 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="b01d4abd-328c-4918-9449-172bf6aeef4b" containerName="extract-content" Dec 05 06:30:00 crc kubenswrapper[4742]: E1205 06:30:00.179298 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43a8a280-2785-4b8e-b657-2fdb94415c41" containerName="extract-content" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.179455 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="43a8a280-2785-4b8e-b657-2fdb94415c41" containerName="extract-content" Dec 05 06:30:00 crc kubenswrapper[4742]: E1205 06:30:00.179634 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69ffb1e0-463e-419d-b31a-a8f9481b4a60" containerName="extract-content" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.179790 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="69ffb1e0-463e-419d-b31a-a8f9481b4a60" containerName="extract-content" Dec 05 06:30:00 crc kubenswrapper[4742]: E1205 06:30:00.179964 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69ffb1e0-463e-419d-b31a-a8f9481b4a60" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.180148 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="69ffb1e0-463e-419d-b31a-a8f9481b4a60" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4742]: E1205 06:30:00.180336 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43a8a280-2785-4b8e-b657-2fdb94415c41" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.180477 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="43a8a280-2785-4b8e-b657-2fdb94415c41" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4742]: E1205 06:30:00.180618 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69ffb1e0-463e-419d-b31a-a8f9481b4a60" containerName="extract-utilities" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.180755 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="69ffb1e0-463e-419d-b31a-a8f9481b4a60" containerName="extract-utilities" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.181325 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="69ffb1e0-463e-419d-b31a-a8f9481b4a60" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.181528 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="43a8a280-2785-4b8e-b657-2fdb94415c41" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.181688 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="b01d4abd-328c-4918-9449-172bf6aeef4b" containerName="registry-server" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.182773 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.185553 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq"] Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.235568 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.235787 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.384504 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/985cdaf6-04f9-4f75-85aa-369243b9290f-config-volume\") pod \"collect-profiles-29415270-225pq\" (UID: \"985cdaf6-04f9-4f75-85aa-369243b9290f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.384685 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsmpb\" (UniqueName: \"kubernetes.io/projected/985cdaf6-04f9-4f75-85aa-369243b9290f-kube-api-access-gsmpb\") pod \"collect-profiles-29415270-225pq\" (UID: \"985cdaf6-04f9-4f75-85aa-369243b9290f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.385038 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/985cdaf6-04f9-4f75-85aa-369243b9290f-secret-volume\") pod \"collect-profiles-29415270-225pq\" (UID: \"985cdaf6-04f9-4f75-85aa-369243b9290f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.486876 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/985cdaf6-04f9-4f75-85aa-369243b9290f-config-volume\") pod \"collect-profiles-29415270-225pq\" (UID: \"985cdaf6-04f9-4f75-85aa-369243b9290f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.487082 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsmpb\" (UniqueName: \"kubernetes.io/projected/985cdaf6-04f9-4f75-85aa-369243b9290f-kube-api-access-gsmpb\") pod \"collect-profiles-29415270-225pq\" (UID: \"985cdaf6-04f9-4f75-85aa-369243b9290f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.487306 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/985cdaf6-04f9-4f75-85aa-369243b9290f-secret-volume\") pod \"collect-profiles-29415270-225pq\" (UID: \"985cdaf6-04f9-4f75-85aa-369243b9290f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.488419 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/985cdaf6-04f9-4f75-85aa-369243b9290f-config-volume\") pod 
\"collect-profiles-29415270-225pq\" (UID: \"985cdaf6-04f9-4f75-85aa-369243b9290f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.501829 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/985cdaf6-04f9-4f75-85aa-369243b9290f-secret-volume\") pod \"collect-profiles-29415270-225pq\" (UID: \"985cdaf6-04f9-4f75-85aa-369243b9290f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.519105 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsmpb\" (UniqueName: \"kubernetes.io/projected/985cdaf6-04f9-4f75-85aa-369243b9290f-kube-api-access-gsmpb\") pod \"collect-profiles-29415270-225pq\" (UID: \"985cdaf6-04f9-4f75-85aa-369243b9290f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" Dec 05 06:30:00 crc kubenswrapper[4742]: I1205 06:30:00.551668 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" Dec 05 06:30:01 crc kubenswrapper[4742]: I1205 06:30:01.018006 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq"] Dec 05 06:30:01 crc kubenswrapper[4742]: W1205 06:30:01.027751 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod985cdaf6_04f9_4f75_85aa_369243b9290f.slice/crio-9bf121175270d6ea9585fa78139699e348e0b663fb9ef243e39de02f2bf8f716 WatchSource:0}: Error finding container 9bf121175270d6ea9585fa78139699e348e0b663fb9ef243e39de02f2bf8f716: Status 404 returned error can't find the container with id 9bf121175270d6ea9585fa78139699e348e0b663fb9ef243e39de02f2bf8f716 Dec 05 06:30:01 crc kubenswrapper[4742]: I1205 06:30:01.153015 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" event={"ID":"985cdaf6-04f9-4f75-85aa-369243b9290f","Type":"ContainerStarted","Data":"9bf121175270d6ea9585fa78139699e348e0b663fb9ef243e39de02f2bf8f716"} Dec 05 06:30:02 crc kubenswrapper[4742]: I1205 06:30:02.166010 4742 generic.go:334] "Generic (PLEG): container finished" podID="985cdaf6-04f9-4f75-85aa-369243b9290f" containerID="390e19dc60edeacf9687506050a9958183c23450abdd1f11308895a5f9a0d987" exitCode=0 Dec 05 06:30:02 crc kubenswrapper[4742]: I1205 06:30:02.166152 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" event={"ID":"985cdaf6-04f9-4f75-85aa-369243b9290f","Type":"ContainerDied","Data":"390e19dc60edeacf9687506050a9958183c23450abdd1f11308895a5f9a0d987"} Dec 05 06:30:03 crc kubenswrapper[4742]: I1205 06:30:03.528678 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" Dec 05 06:30:03 crc kubenswrapper[4742]: I1205 06:30:03.640255 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/985cdaf6-04f9-4f75-85aa-369243b9290f-config-volume\") pod \"985cdaf6-04f9-4f75-85aa-369243b9290f\" (UID: \"985cdaf6-04f9-4f75-85aa-369243b9290f\") " Dec 05 06:30:03 crc kubenswrapper[4742]: I1205 06:30:03.640336 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/985cdaf6-04f9-4f75-85aa-369243b9290f-secret-volume\") pod \"985cdaf6-04f9-4f75-85aa-369243b9290f\" (UID: \"985cdaf6-04f9-4f75-85aa-369243b9290f\") " Dec 05 06:30:03 crc kubenswrapper[4742]: I1205 06:30:03.640468 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsmpb\" (UniqueName: \"kubernetes.io/projected/985cdaf6-04f9-4f75-85aa-369243b9290f-kube-api-access-gsmpb\") pod \"985cdaf6-04f9-4f75-85aa-369243b9290f\" (UID: \"985cdaf6-04f9-4f75-85aa-369243b9290f\") " Dec 05 06:30:03 crc kubenswrapper[4742]: I1205 06:30:03.641128 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/985cdaf6-04f9-4f75-85aa-369243b9290f-config-volume" (OuterVolumeSpecName: "config-volume") pod "985cdaf6-04f9-4f75-85aa-369243b9290f" (UID: "985cdaf6-04f9-4f75-85aa-369243b9290f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:30:03 crc kubenswrapper[4742]: I1205 06:30:03.646540 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/985cdaf6-04f9-4f75-85aa-369243b9290f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "985cdaf6-04f9-4f75-85aa-369243b9290f" (UID: "985cdaf6-04f9-4f75-85aa-369243b9290f"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:30:03 crc kubenswrapper[4742]: I1205 06:30:03.652184 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/985cdaf6-04f9-4f75-85aa-369243b9290f-kube-api-access-gsmpb" (OuterVolumeSpecName: "kube-api-access-gsmpb") pod "985cdaf6-04f9-4f75-85aa-369243b9290f" (UID: "985cdaf6-04f9-4f75-85aa-369243b9290f"). InnerVolumeSpecName "kube-api-access-gsmpb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:30:03 crc kubenswrapper[4742]: I1205 06:30:03.742327 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsmpb\" (UniqueName: \"kubernetes.io/projected/985cdaf6-04f9-4f75-85aa-369243b9290f-kube-api-access-gsmpb\") on node \"crc\" DevicePath \"\"" Dec 05 06:30:03 crc kubenswrapper[4742]: I1205 06:30:03.742384 4742 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/985cdaf6-04f9-4f75-85aa-369243b9290f-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:30:03 crc kubenswrapper[4742]: I1205 06:30:03.742410 4742 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/985cdaf6-04f9-4f75-85aa-369243b9290f-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:30:04 crc kubenswrapper[4742]: I1205 06:30:04.188535 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" event={"ID":"985cdaf6-04f9-4f75-85aa-369243b9290f","Type":"ContainerDied","Data":"9bf121175270d6ea9585fa78139699e348e0b663fb9ef243e39de02f2bf8f716"} Dec 05 06:30:04 crc kubenswrapper[4742]: I1205 06:30:04.188593 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9bf121175270d6ea9585fa78139699e348e0b663fb9ef243e39de02f2bf8f716" Dec 05 06:30:04 crc kubenswrapper[4742]: I1205 06:30:04.188604 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415270-225pq" Dec 05 06:30:04 crc kubenswrapper[4742]: I1205 06:30:04.614869 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz"] Dec 05 06:30:04 crc kubenswrapper[4742]: I1205 06:30:04.622327 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415225-grqtz"] Dec 05 06:30:06 crc kubenswrapper[4742]: I1205 06:30:06.394171 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebaea921-5d50-4d64-b73e-db0feab77248" path="/var/lib/kubelet/pods/ebaea921-5d50-4d64-b73e-db0feab77248/volumes" Dec 05 06:30:12 crc kubenswrapper[4742]: I1205 06:30:12.529089 4742 scope.go:117] "RemoveContainer" containerID="f852d749ea8342bd3d3752c9c9b2516fd2c9158e32d518022257f1233a574cd3" Dec 05 06:30:16 crc kubenswrapper[4742]: I1205 06:30:16.671647 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:30:16 crc kubenswrapper[4742]: I1205 06:30:16.672360 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:30:16 crc kubenswrapper[4742]: I1205 06:30:16.672431 4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 06:30:16 crc kubenswrapper[4742]: I1205 06:30:16.673173 4742 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:30:16 crc kubenswrapper[4742]: I1205 06:30:16.673266 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" gracePeriod=600 Dec 05 06:30:16 crc kubenswrapper[4742]: E1205 06:30:16.803460 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:30:17 crc kubenswrapper[4742]: I1205 06:30:17.317175 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" exitCode=0 Dec 05 06:30:17 crc kubenswrapper[4742]: I1205 06:30:17.317266 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6"} Dec 05 06:30:17 crc kubenswrapper[4742]: I1205 06:30:17.317424 4742 scope.go:117] "RemoveContainer" containerID="12fb54377caf5ec8b430c9e38975da830717543ac04874595b16f5a28eb29666" Dec 05 06:30:17 crc kubenswrapper[4742]: I1205 06:30:17.318783 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:30:17 crc kubenswrapper[4742]: E1205 06:30:17.319458 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:30:28 crc kubenswrapper[4742]: I1205 06:30:28.382565 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:30:28 crc kubenswrapper[4742]: E1205 06:30:28.383168 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:30:42 crc kubenswrapper[4742]: I1205 06:30:42.383967 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:30:42 crc kubenswrapper[4742]: E1205 06:30:42.384568 4742 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:30:54 crc kubenswrapper[4742]: I1205 06:30:54.386975 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:30:54 crc kubenswrapper[4742]: E1205 06:30:54.387557 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:31:07 crc kubenswrapper[4742]: I1205 06:31:07.382914 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:31:07 crc kubenswrapper[4742]: E1205 06:31:07.383787 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:31:21 crc kubenswrapper[4742]: I1205 06:31:21.382727 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:31:21 crc kubenswrapper[4742]: E1205 06:31:21.383498 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:31:32 crc kubenswrapper[4742]: I1205 06:31:32.382981 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:31:32 crc kubenswrapper[4742]: E1205 06:31:32.383843 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:31:45 crc kubenswrapper[4742]: I1205 06:31:45.383272 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:31:45 crc kubenswrapper[4742]: E1205 06:31:45.384005 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:31:57 crc kubenswrapper[4742]: I1205 06:31:57.383269 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:31:57 crc kubenswrapper[4742]: E1205 06:31:57.385767 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:32:09 crc kubenswrapper[4742]: I1205 06:32:09.383007 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:32:09 crc kubenswrapper[4742]: E1205 06:32:09.383733 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:32:20 crc kubenswrapper[4742]: I1205 06:32:20.383656 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:32:20 crc kubenswrapper[4742]: E1205 06:32:20.384707 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:32:34 crc kubenswrapper[4742]: I1205 06:32:34.393028 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:32:34 crc kubenswrapper[4742]: E1205 06:32:34.394186 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.117917 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xlqjg"] Dec 05 06:32:35 crc kubenswrapper[4742]: E1205 06:32:35.120697 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="985cdaf6-04f9-4f75-85aa-369243b9290f" containerName="collect-profiles" Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.120740 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="985cdaf6-04f9-4f75-85aa-369243b9290f" containerName="collect-profiles" Dec 05 06:32:35 crc kubenswrapper[4742]: 
I1205 06:32:35.121158 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="985cdaf6-04f9-4f75-85aa-369243b9290f" containerName="collect-profiles" Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.123317 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.146550 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xlqjg"] Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.236770 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/371a7310-e628-4081-b4ca-a47868a1f204-catalog-content\") pod \"certified-operators-xlqjg\" (UID: \"371a7310-e628-4081-b4ca-a47868a1f204\") " pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.236882 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/371a7310-e628-4081-b4ca-a47868a1f204-utilities\") pod \"certified-operators-xlqjg\" (UID: \"371a7310-e628-4081-b4ca-a47868a1f204\") " pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.237569 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7q9tj\" (UniqueName: \"kubernetes.io/projected/371a7310-e628-4081-b4ca-a47868a1f204-kube-api-access-7q9tj\") pod \"certified-operators-xlqjg\" (UID: \"371a7310-e628-4081-b4ca-a47868a1f204\") " pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.339427 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/371a7310-e628-4081-b4ca-a47868a1f204-catalog-content\") pod \"certified-operators-xlqjg\" (UID: \"371a7310-e628-4081-b4ca-a47868a1f204\") " pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.339514 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/371a7310-e628-4081-b4ca-a47868a1f204-utilities\") pod \"certified-operators-xlqjg\" (UID: \"371a7310-e628-4081-b4ca-a47868a1f204\") " pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.339587 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7q9tj\" (UniqueName: \"kubernetes.io/projected/371a7310-e628-4081-b4ca-a47868a1f204-kube-api-access-7q9tj\") pod \"certified-operators-xlqjg\" (UID: \"371a7310-e628-4081-b4ca-a47868a1f204\") " pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.339954 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/371a7310-e628-4081-b4ca-a47868a1f204-catalog-content\") pod \"certified-operators-xlqjg\" (UID: \"371a7310-e628-4081-b4ca-a47868a1f204\") " pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.340175 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/371a7310-e628-4081-b4ca-a47868a1f204-utilities\") pod \"certified-operators-xlqjg\" (UID: \"371a7310-e628-4081-b4ca-a47868a1f204\") " pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.364641 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7q9tj\" (UniqueName: \"kubernetes.io/projected/371a7310-e628-4081-b4ca-a47868a1f204-kube-api-access-7q9tj\") pod \"certified-operators-xlqjg\" (UID: \"371a7310-e628-4081-b4ca-a47868a1f204\") " pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.461742 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:35 crc kubenswrapper[4742]: I1205 06:32:35.952390 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xlqjg"] Dec 05 06:32:36 crc kubenswrapper[4742]: I1205 06:32:36.588368 4742 generic.go:334] "Generic (PLEG): container finished" podID="371a7310-e628-4081-b4ca-a47868a1f204" containerID="f75292340a3c05e55cedfc7cf6b0a43c34a70e49bf732d45ebcd131afa6e3ea5" exitCode=0 Dec 05 06:32:36 crc kubenswrapper[4742]: I1205 06:32:36.588434 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xlqjg" event={"ID":"371a7310-e628-4081-b4ca-a47868a1f204","Type":"ContainerDied","Data":"f75292340a3c05e55cedfc7cf6b0a43c34a70e49bf732d45ebcd131afa6e3ea5"} Dec 05 06:32:36 crc kubenswrapper[4742]: I1205 06:32:36.588477 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xlqjg" event={"ID":"371a7310-e628-4081-b4ca-a47868a1f204","Type":"ContainerStarted","Data":"639a633fa3366ceb4b8a8a64589f9d3ec2f37b0621e69e8bb78975adde23f3a4"} Dec 05 06:32:37 crc kubenswrapper[4742]: I1205 06:32:37.608313 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xlqjg" event={"ID":"371a7310-e628-4081-b4ca-a47868a1f204","Type":"ContainerStarted","Data":"69ca0621f11b3655cc5b76cd0dabff6fc5556c29ba2529c387554ac2f2155b5f"} Dec 05 06:32:38 crc kubenswrapper[4742]: I1205 06:32:38.622378 4742 generic.go:334] "Generic (PLEG): container finished" podID="371a7310-e628-4081-b4ca-a47868a1f204" containerID="69ca0621f11b3655cc5b76cd0dabff6fc5556c29ba2529c387554ac2f2155b5f" exitCode=0 Dec 05 06:32:38 crc kubenswrapper[4742]: I1205 06:32:38.622495 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xlqjg" event={"ID":"371a7310-e628-4081-b4ca-a47868a1f204","Type":"ContainerDied","Data":"69ca0621f11b3655cc5b76cd0dabff6fc5556c29ba2529c387554ac2f2155b5f"} Dec 05 06:32:39 crc kubenswrapper[4742]: I1205 06:32:39.635318 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xlqjg" event={"ID":"371a7310-e628-4081-b4ca-a47868a1f204","Type":"ContainerStarted","Data":"e0739b3e9bff06c90e1469c41b4a46e803204be8a430bcc981fc376dd4d9be33"} Dec 05 06:32:39 crc kubenswrapper[4742]: I1205 06:32:39.663827 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xlqjg" podStartSLOduration=2.178248186 podStartE2EDuration="4.663800718s" podCreationTimestamp="2025-12-05 06:32:35 +0000 UTC" firstStartedPulling="2025-12-05 06:32:36.591177545 +0000 UTC m=+2432.503312647" lastFinishedPulling="2025-12-05 06:32:39.076730087 +0000 
UTC m=+2434.988865179" observedRunningTime="2025-12-05 06:32:39.658617791 +0000 UTC m=+2435.570752853" watchObservedRunningTime="2025-12-05 06:32:39.663800718 +0000 UTC m=+2435.575935820" Dec 05 06:32:45 crc kubenswrapper[4742]: I1205 06:32:45.383365 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:32:45 crc kubenswrapper[4742]: E1205 06:32:45.384011 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:32:45 crc kubenswrapper[4742]: I1205 06:32:45.462424 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:45 crc kubenswrapper[4742]: I1205 06:32:45.462473 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:45 crc kubenswrapper[4742]: I1205 06:32:45.537103 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:45 crc kubenswrapper[4742]: I1205 06:32:45.749135 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:45 crc kubenswrapper[4742]: I1205 06:32:45.802839 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xlqjg"] Dec 05 06:32:47 crc kubenswrapper[4742]: I1205 06:32:47.706599 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xlqjg" podUID="371a7310-e628-4081-b4ca-a47868a1f204" containerName="registry-server" containerID="cri-o://e0739b3e9bff06c90e1469c41b4a46e803204be8a430bcc981fc376dd4d9be33" gracePeriod=2 Dec 05 06:32:48 crc kubenswrapper[4742]: I1205 06:32:48.716525 4742 generic.go:334] "Generic (PLEG): container finished" podID="371a7310-e628-4081-b4ca-a47868a1f204" containerID="e0739b3e9bff06c90e1469c41b4a46e803204be8a430bcc981fc376dd4d9be33" exitCode=0 Dec 05 06:32:48 crc kubenswrapper[4742]: I1205 06:32:48.716563 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xlqjg" event={"ID":"371a7310-e628-4081-b4ca-a47868a1f204","Type":"ContainerDied","Data":"e0739b3e9bff06c90e1469c41b4a46e803204be8a430bcc981fc376dd4d9be33"} Dec 05 06:32:48 crc kubenswrapper[4742]: I1205 06:32:48.716813 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xlqjg" event={"ID":"371a7310-e628-4081-b4ca-a47868a1f204","Type":"ContainerDied","Data":"639a633fa3366ceb4b8a8a64589f9d3ec2f37b0621e69e8bb78975adde23f3a4"} Dec 05 06:32:48 crc kubenswrapper[4742]: I1205 06:32:48.716824 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="639a633fa3366ceb4b8a8a64589f9d3ec2f37b0621e69e8bb78975adde23f3a4" Dec 05 06:32:48 crc kubenswrapper[4742]: I1205 06:32:48.723180 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:48 crc kubenswrapper[4742]: I1205 06:32:48.868149 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/371a7310-e628-4081-b4ca-a47868a1f204-catalog-content\") pod \"371a7310-e628-4081-b4ca-a47868a1f204\" (UID: \"371a7310-e628-4081-b4ca-a47868a1f204\") " Dec 05 06:32:48 crc kubenswrapper[4742]: I1205 06:32:48.868224 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7q9tj\" (UniqueName: \"kubernetes.io/projected/371a7310-e628-4081-b4ca-a47868a1f204-kube-api-access-7q9tj\") pod \"371a7310-e628-4081-b4ca-a47868a1f204\" (UID: \"371a7310-e628-4081-b4ca-a47868a1f204\") " Dec 05 06:32:48 crc kubenswrapper[4742]: I1205 06:32:48.868297 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/371a7310-e628-4081-b4ca-a47868a1f204-utilities\") pod \"371a7310-e628-4081-b4ca-a47868a1f204\" (UID: \"371a7310-e628-4081-b4ca-a47868a1f204\") " Dec 05 06:32:48 crc kubenswrapper[4742]: I1205 06:32:48.869796 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/371a7310-e628-4081-b4ca-a47868a1f204-utilities" (OuterVolumeSpecName: "utilities") pod "371a7310-e628-4081-b4ca-a47868a1f204" (UID: "371a7310-e628-4081-b4ca-a47868a1f204"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:32:48 crc kubenswrapper[4742]: I1205 06:32:48.876448 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/371a7310-e628-4081-b4ca-a47868a1f204-kube-api-access-7q9tj" (OuterVolumeSpecName: "kube-api-access-7q9tj") pod "371a7310-e628-4081-b4ca-a47868a1f204" (UID: "371a7310-e628-4081-b4ca-a47868a1f204"). InnerVolumeSpecName "kube-api-access-7q9tj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:32:48 crc kubenswrapper[4742]: I1205 06:32:48.920976 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/371a7310-e628-4081-b4ca-a47868a1f204-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "371a7310-e628-4081-b4ca-a47868a1f204" (UID: "371a7310-e628-4081-b4ca-a47868a1f204"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:32:48 crc kubenswrapper[4742]: I1205 06:32:48.970009 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/371a7310-e628-4081-b4ca-a47868a1f204-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:32:48 crc kubenswrapper[4742]: I1205 06:32:48.970112 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7q9tj\" (UniqueName: \"kubernetes.io/projected/371a7310-e628-4081-b4ca-a47868a1f204-kube-api-access-7q9tj\") on node \"crc\" DevicePath \"\"" Dec 05 06:32:48 crc kubenswrapper[4742]: I1205 06:32:48.970142 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/371a7310-e628-4081-b4ca-a47868a1f204-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:32:49 crc kubenswrapper[4742]: I1205 06:32:49.724267 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xlqjg" Dec 05 06:32:49 crc kubenswrapper[4742]: I1205 06:32:49.762604 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xlqjg"] Dec 05 06:32:49 crc kubenswrapper[4742]: I1205 06:32:49.768340 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xlqjg"] Dec 05 06:32:50 crc kubenswrapper[4742]: I1205 06:32:50.395514 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="371a7310-e628-4081-b4ca-a47868a1f204" path="/var/lib/kubelet/pods/371a7310-e628-4081-b4ca-a47868a1f204/volumes" Dec 05 06:32:58 crc kubenswrapper[4742]: I1205 06:32:58.382693 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:32:58 crc kubenswrapper[4742]: E1205 06:32:58.383404 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:33:10 crc kubenswrapper[4742]: I1205 06:33:10.383347 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:33:10 crc kubenswrapper[4742]: E1205 06:33:10.384440 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:33:22 crc kubenswrapper[4742]: I1205 06:33:22.384861 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:33:22 crc kubenswrapper[4742]: E1205 06:33:22.386015 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:33:35 crc kubenswrapper[4742]: I1205 06:33:35.383515 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:33:35 crc kubenswrapper[4742]: E1205 06:33:35.384634 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:33:48 crc kubenswrapper[4742]: I1205 06:33:48.383473 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 
06:33:48 crc kubenswrapper[4742]: E1205 06:33:48.384376 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:34:00 crc kubenswrapper[4742]: I1205 06:34:00.383455 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:34:00 crc kubenswrapper[4742]: E1205 06:34:00.384421 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:34:11 crc kubenswrapper[4742]: I1205 06:34:11.383129 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:34:11 crc kubenswrapper[4742]: E1205 06:34:11.385812 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:34:24 crc kubenswrapper[4742]: I1205 06:34:24.391949 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:34:24 crc kubenswrapper[4742]: E1205 06:34:24.392822 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:34:39 crc kubenswrapper[4742]: I1205 06:34:39.383045 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:34:39 crc kubenswrapper[4742]: E1205 06:34:39.384018 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:34:51 crc kubenswrapper[4742]: I1205 06:34:51.383310 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:34:51 crc kubenswrapper[4742]: E1205 06:34:51.384515 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:35:02 crc kubenswrapper[4742]: I1205 06:35:02.382789 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:35:02 crc kubenswrapper[4742]: E1205 06:35:02.384482 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:35:14 crc kubenswrapper[4742]: I1205 06:35:14.395109 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:35:14 crc kubenswrapper[4742]: E1205 06:35:14.396147 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:35:25 crc kubenswrapper[4742]: I1205 06:35:25.382327 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:35:26 crc kubenswrapper[4742]: I1205 06:35:26.255129 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"11632c4bddc650d6853791ae73228e723ef7db19eea7e97ff0f84f7b29e2f3c8"} Dec 05 06:37:46 crc kubenswrapper[4742]: I1205 06:37:46.671137 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:37:46 crc kubenswrapper[4742]: I1205 06:37:46.671733 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:38:16 crc kubenswrapper[4742]: I1205 06:38:16.670568 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:38:16 crc kubenswrapper[4742]: I1205 06:38:16.671262 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:38:46 crc kubenswrapper[4742]: I1205 06:38:46.671252 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:38:46 crc kubenswrapper[4742]: I1205 06:38:46.672268 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:38:46 crc kubenswrapper[4742]: I1205 06:38:46.672359 4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 06:38:46 crc kubenswrapper[4742]: I1205 06:38:46.673325 4742 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"11632c4bddc650d6853791ae73228e723ef7db19eea7e97ff0f84f7b29e2f3c8"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:38:46 crc kubenswrapper[4742]: I1205 06:38:46.673471 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://11632c4bddc650d6853791ae73228e723ef7db19eea7e97ff0f84f7b29e2f3c8" gracePeriod=600 Dec 05 06:38:47 crc kubenswrapper[4742]: I1205 06:38:47.078170 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="11632c4bddc650d6853791ae73228e723ef7db19eea7e97ff0f84f7b29e2f3c8" exitCode=0 Dec 05 06:38:47 crc kubenswrapper[4742]: I1205 06:38:47.078304 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"11632c4bddc650d6853791ae73228e723ef7db19eea7e97ff0f84f7b29e2f3c8"} Dec 05 06:38:47 crc kubenswrapper[4742]: I1205 06:38:47.078567 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479"} Dec 05 06:38:47 crc kubenswrapper[4742]: I1205 06:38:47.078601 4742 scope.go:117] "RemoveContainer" containerID="e26c380b9e3c7a5dc2fec19a469a1ba3ec7073bff64fb3eac4cc5c5188052be6" Dec 05 06:39:12 crc kubenswrapper[4742]: I1205 06:39:12.789497 4742 scope.go:117] "RemoveContainer" containerID="e0739b3e9bff06c90e1469c41b4a46e803204be8a430bcc981fc376dd4d9be33" Dec 05 06:39:12 crc kubenswrapper[4742]: I1205 06:39:12.820315 4742 scope.go:117] "RemoveContainer" containerID="69ca0621f11b3655cc5b76cd0dabff6fc5556c29ba2529c387554ac2f2155b5f" Dec 05 06:39:12 crc kubenswrapper[4742]: I1205 06:39:12.859551 4742 scope.go:117] "RemoveContainer" containerID="f75292340a3c05e55cedfc7cf6b0a43c34a70e49bf732d45ebcd131afa6e3ea5" Dec 05 
06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.427995 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8qfth"] Dec 05 06:39:45 crc kubenswrapper[4742]: E1205 06:39:45.429327 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="371a7310-e628-4081-b4ca-a47868a1f204" containerName="registry-server" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.429354 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="371a7310-e628-4081-b4ca-a47868a1f204" containerName="registry-server" Dec 05 06:39:45 crc kubenswrapper[4742]: E1205 06:39:45.429391 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="371a7310-e628-4081-b4ca-a47868a1f204" containerName="extract-content" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.429406 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="371a7310-e628-4081-b4ca-a47868a1f204" containerName="extract-content" Dec 05 06:39:45 crc kubenswrapper[4742]: E1205 06:39:45.429448 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="371a7310-e628-4081-b4ca-a47868a1f204" containerName="extract-utilities" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.429462 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="371a7310-e628-4081-b4ca-a47868a1f204" containerName="extract-utilities" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.429739 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="371a7310-e628-4081-b4ca-a47868a1f204" containerName="registry-server" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.431856 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.442708 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qfth"] Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.502750 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scclz\" (UniqueName: \"kubernetes.io/projected/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-kube-api-access-scclz\") pod \"redhat-marketplace-8qfth\" (UID: \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\") " pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.502870 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-utilities\") pod \"redhat-marketplace-8qfth\" (UID: \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\") " pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.502997 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-catalog-content\") pod \"redhat-marketplace-8qfth\" (UID: \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\") " pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.604504 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scclz\" (UniqueName: \"kubernetes.io/projected/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-kube-api-access-scclz\") pod \"redhat-marketplace-8qfth\" (UID: \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\") " 
pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.604559 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-utilities\") pod \"redhat-marketplace-8qfth\" (UID: \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\") " pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.604584 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-catalog-content\") pod \"redhat-marketplace-8qfth\" (UID: \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\") " pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.605096 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-utilities\") pod \"redhat-marketplace-8qfth\" (UID: \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\") " pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.606206 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-catalog-content\") pod \"redhat-marketplace-8qfth\" (UID: \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\") " pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.630132 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scclz\" (UniqueName: \"kubernetes.io/projected/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-kube-api-access-scclz\") pod \"redhat-marketplace-8qfth\" (UID: \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\") " pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:45 crc kubenswrapper[4742]: I1205 06:39:45.762394 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:46 crc kubenswrapper[4742]: I1205 06:39:46.229987 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qfth"] Dec 05 06:39:46 crc kubenswrapper[4742]: W1205 06:39:46.236317 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b4aab07_75be_4124_805b_f7d5f5a5e6d2.slice/crio-9fd31b4c7cac98c3be7295e84f7352b03a2dccb93abe555f7137af98d78e34e6 WatchSource:0}: Error finding container 9fd31b4c7cac98c3be7295e84f7352b03a2dccb93abe555f7137af98d78e34e6: Status 404 returned error can't find the container with id 9fd31b4c7cac98c3be7295e84f7352b03a2dccb93abe555f7137af98d78e34e6 Dec 05 06:39:46 crc kubenswrapper[4742]: I1205 06:39:46.655166 4742 generic.go:334] "Generic (PLEG): container finished" podID="1b4aab07-75be-4124-805b-f7d5f5a5e6d2" containerID="473c1fc6c962233118a9aef3ceaf04298bc8e9460f85192b7f07b7dc56ec14c8" exitCode=0 Dec 05 06:39:46 crc kubenswrapper[4742]: I1205 06:39:46.655212 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qfth" event={"ID":"1b4aab07-75be-4124-805b-f7d5f5a5e6d2","Type":"ContainerDied","Data":"473c1fc6c962233118a9aef3ceaf04298bc8e9460f85192b7f07b7dc56ec14c8"} Dec 05 06:39:46 crc kubenswrapper[4742]: I1205 06:39:46.655237 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qfth" event={"ID":"1b4aab07-75be-4124-805b-f7d5f5a5e6d2","Type":"ContainerStarted","Data":"9fd31b4c7cac98c3be7295e84f7352b03a2dccb93abe555f7137af98d78e34e6"} Dec 05 06:39:46 crc kubenswrapper[4742]: I1205 06:39:46.657423 4742 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 06:39:47 crc kubenswrapper[4742]: I1205 06:39:47.669004 4742 generic.go:334] "Generic (PLEG): container finished" podID="1b4aab07-75be-4124-805b-f7d5f5a5e6d2" containerID="7e73c1b0eeb9afe85908403c92366d29fb19cc28add3b5aabba1450e316ccea4" exitCode=0 Dec 05 06:39:47 crc kubenswrapper[4742]: I1205 06:39:47.669650 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qfth" event={"ID":"1b4aab07-75be-4124-805b-f7d5f5a5e6d2","Type":"ContainerDied","Data":"7e73c1b0eeb9afe85908403c92366d29fb19cc28add3b5aabba1450e316ccea4"} Dec 05 06:39:48 crc kubenswrapper[4742]: I1205 06:39:48.680195 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qfth" event={"ID":"1b4aab07-75be-4124-805b-f7d5f5a5e6d2","Type":"ContainerStarted","Data":"7654d96c211777c537e8d5ed713140aaa50e2eb4afc6b24f9f43b4ca52d7345d"} Dec 05 06:39:48 crc kubenswrapper[4742]: I1205 06:39:48.704150 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8qfth" podStartSLOduration=2.302606334 podStartE2EDuration="3.704131132s" podCreationTimestamp="2025-12-05 06:39:45 +0000 UTC" firstStartedPulling="2025-12-05 06:39:46.65711459 +0000 UTC m=+2862.569249662" lastFinishedPulling="2025-12-05 06:39:48.058639368 +0000 UTC m=+2863.970774460" observedRunningTime="2025-12-05 06:39:48.701506572 +0000 UTC m=+2864.613641704" watchObservedRunningTime="2025-12-05 06:39:48.704131132 +0000 UTC m=+2864.616266204" Dec 05 06:39:53 crc kubenswrapper[4742]: I1205 06:39:53.699984 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2md54"] Dec 
05 06:39:53 crc kubenswrapper[4742]: I1205 06:39:53.704020 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:39:53 crc kubenswrapper[4742]: I1205 06:39:53.716371 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2md54"] Dec 05 06:39:53 crc kubenswrapper[4742]: I1205 06:39:53.725653 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-catalog-content\") pod \"redhat-operators-2md54\" (UID: \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\") " pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:39:53 crc kubenswrapper[4742]: I1205 06:39:53.725744 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-utilities\") pod \"redhat-operators-2md54\" (UID: \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\") " pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:39:53 crc kubenswrapper[4742]: I1205 06:39:53.725786 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzpch\" (UniqueName: \"kubernetes.io/projected/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-kube-api-access-jzpch\") pod \"redhat-operators-2md54\" (UID: \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\") " pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:39:53 crc kubenswrapper[4742]: I1205 06:39:53.827078 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-catalog-content\") pod \"redhat-operators-2md54\" (UID: \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\") " pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:39:53 crc kubenswrapper[4742]: I1205 06:39:53.827174 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-utilities\") pod \"redhat-operators-2md54\" (UID: \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\") " pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:39:53 crc kubenswrapper[4742]: I1205 06:39:53.827214 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzpch\" (UniqueName: \"kubernetes.io/projected/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-kube-api-access-jzpch\") pod \"redhat-operators-2md54\" (UID: \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\") " pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:39:53 crc kubenswrapper[4742]: I1205 06:39:53.828188 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-catalog-content\") pod \"redhat-operators-2md54\" (UID: \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\") " pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:39:53 crc kubenswrapper[4742]: I1205 06:39:53.828264 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-utilities\") pod \"redhat-operators-2md54\" (UID: \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\") " pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:39:53 crc kubenswrapper[4742]: 
I1205 06:39:53.862501 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzpch\" (UniqueName: \"kubernetes.io/projected/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-kube-api-access-jzpch\") pod \"redhat-operators-2md54\" (UID: \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\") " pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:39:54 crc kubenswrapper[4742]: I1205 06:39:54.052864 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:39:54 crc kubenswrapper[4742]: I1205 06:39:54.525697 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2md54"] Dec 05 06:39:54 crc kubenswrapper[4742]: I1205 06:39:54.756374 4742 generic.go:334] "Generic (PLEG): container finished" podID="645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" containerID="c0df4eab234d399599db776fa36e3f29f54721ce55d07f5730242dd701fb2a93" exitCode=0 Dec 05 06:39:54 crc kubenswrapper[4742]: I1205 06:39:54.756455 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2md54" event={"ID":"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0","Type":"ContainerDied","Data":"c0df4eab234d399599db776fa36e3f29f54721ce55d07f5730242dd701fb2a93"} Dec 05 06:39:54 crc kubenswrapper[4742]: I1205 06:39:54.756496 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2md54" event={"ID":"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0","Type":"ContainerStarted","Data":"2e4206ead1741eb001a273a5a7c7a1557dc39fb035a2ca9a18a8a0498d0b669f"} Dec 05 06:39:55 crc kubenswrapper[4742]: I1205 06:39:55.762470 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:55 crc kubenswrapper[4742]: I1205 06:39:55.762785 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:55 crc kubenswrapper[4742]: I1205 06:39:55.772180 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2md54" event={"ID":"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0","Type":"ContainerStarted","Data":"862b138c8ad35afdacf0febed3522e3f0bbdedb7f7144f2aec1b65ab69244b55"} Dec 05 06:39:55 crc kubenswrapper[4742]: I1205 06:39:55.821574 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:56 crc kubenswrapper[4742]: I1205 06:39:56.781529 4742 generic.go:334] "Generic (PLEG): container finished" podID="645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" containerID="862b138c8ad35afdacf0febed3522e3f0bbdedb7f7144f2aec1b65ab69244b55" exitCode=0 Dec 05 06:39:56 crc kubenswrapper[4742]: I1205 06:39:56.781602 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2md54" event={"ID":"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0","Type":"ContainerDied","Data":"862b138c8ad35afdacf0febed3522e3f0bbdedb7f7144f2aec1b65ab69244b55"} Dec 05 06:39:56 crc kubenswrapper[4742]: I1205 06:39:56.828740 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:57 crc kubenswrapper[4742]: I1205 06:39:57.788558 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2md54" 
event={"ID":"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0","Type":"ContainerStarted","Data":"d545d17c148ff40ccde1832183ec916435ef9c09b864600ec4905a695542d9db"} Dec 05 06:39:57 crc kubenswrapper[4742]: I1205 06:39:57.807138 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2md54" podStartSLOduration=2.368861043 podStartE2EDuration="4.807119399s" podCreationTimestamp="2025-12-05 06:39:53 +0000 UTC" firstStartedPulling="2025-12-05 06:39:54.75845655 +0000 UTC m=+2870.670591612" lastFinishedPulling="2025-12-05 06:39:57.196714866 +0000 UTC m=+2873.108849968" observedRunningTime="2025-12-05 06:39:57.804619833 +0000 UTC m=+2873.716754915" watchObservedRunningTime="2025-12-05 06:39:57.807119399 +0000 UTC m=+2873.719254461" Dec 05 06:39:58 crc kubenswrapper[4742]: I1205 06:39:58.068507 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qfth"] Dec 05 06:39:58 crc kubenswrapper[4742]: I1205 06:39:58.795091 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8qfth" podUID="1b4aab07-75be-4124-805b-f7d5f5a5e6d2" containerName="registry-server" containerID="cri-o://7654d96c211777c537e8d5ed713140aaa50e2eb4afc6b24f9f43b4ca52d7345d" gracePeriod=2 Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.216451 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.313892 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-catalog-content\") pod \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\" (UID: \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\") " Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.313959 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scclz\" (UniqueName: \"kubernetes.io/projected/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-kube-api-access-scclz\") pod \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\" (UID: \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\") " Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.314041 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-utilities\") pod \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\" (UID: \"1b4aab07-75be-4124-805b-f7d5f5a5e6d2\") " Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.315641 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-utilities" (OuterVolumeSpecName: "utilities") pod "1b4aab07-75be-4124-805b-f7d5f5a5e6d2" (UID: "1b4aab07-75be-4124-805b-f7d5f5a5e6d2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.321455 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-kube-api-access-scclz" (OuterVolumeSpecName: "kube-api-access-scclz") pod "1b4aab07-75be-4124-805b-f7d5f5a5e6d2" (UID: "1b4aab07-75be-4124-805b-f7d5f5a5e6d2"). InnerVolumeSpecName "kube-api-access-scclz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.334936 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1b4aab07-75be-4124-805b-f7d5f5a5e6d2" (UID: "1b4aab07-75be-4124-805b-f7d5f5a5e6d2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.415572 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.415706 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scclz\" (UniqueName: \"kubernetes.io/projected/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-kube-api-access-scclz\") on node \"crc\" DevicePath \"\"" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.415762 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b4aab07-75be-4124-805b-f7d5f5a5e6d2-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.807976 4742 generic.go:334] "Generic (PLEG): container finished" podID="1b4aab07-75be-4124-805b-f7d5f5a5e6d2" containerID="7654d96c211777c537e8d5ed713140aaa50e2eb4afc6b24f9f43b4ca52d7345d" exitCode=0 Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.808023 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qfth" event={"ID":"1b4aab07-75be-4124-805b-f7d5f5a5e6d2","Type":"ContainerDied","Data":"7654d96c211777c537e8d5ed713140aaa50e2eb4afc6b24f9f43b4ca52d7345d"} Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.808031 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8qfth" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.808069 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qfth" event={"ID":"1b4aab07-75be-4124-805b-f7d5f5a5e6d2","Type":"ContainerDied","Data":"9fd31b4c7cac98c3be7295e84f7352b03a2dccb93abe555f7137af98d78e34e6"} Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.808089 4742 scope.go:117] "RemoveContainer" containerID="7654d96c211777c537e8d5ed713140aaa50e2eb4afc6b24f9f43b4ca52d7345d" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.840253 4742 scope.go:117] "RemoveContainer" containerID="7e73c1b0eeb9afe85908403c92366d29fb19cc28add3b5aabba1450e316ccea4" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.841692 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qfth"] Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.847226 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qfth"] Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.863534 4742 scope.go:117] "RemoveContainer" containerID="473c1fc6c962233118a9aef3ceaf04298bc8e9460f85192b7f07b7dc56ec14c8" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.898478 4742 scope.go:117] "RemoveContainer" containerID="7654d96c211777c537e8d5ed713140aaa50e2eb4afc6b24f9f43b4ca52d7345d" Dec 05 06:39:59 crc kubenswrapper[4742]: E1205 06:39:59.898891 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7654d96c211777c537e8d5ed713140aaa50e2eb4afc6b24f9f43b4ca52d7345d\": container with ID starting with 7654d96c211777c537e8d5ed713140aaa50e2eb4afc6b24f9f43b4ca52d7345d not found: ID does not exist" containerID="7654d96c211777c537e8d5ed713140aaa50e2eb4afc6b24f9f43b4ca52d7345d" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.898961 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7654d96c211777c537e8d5ed713140aaa50e2eb4afc6b24f9f43b4ca52d7345d"} err="failed to get container status \"7654d96c211777c537e8d5ed713140aaa50e2eb4afc6b24f9f43b4ca52d7345d\": rpc error: code = NotFound desc = could not find container \"7654d96c211777c537e8d5ed713140aaa50e2eb4afc6b24f9f43b4ca52d7345d\": container with ID starting with 7654d96c211777c537e8d5ed713140aaa50e2eb4afc6b24f9f43b4ca52d7345d not found: ID does not exist" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.898995 4742 scope.go:117] "RemoveContainer" containerID="7e73c1b0eeb9afe85908403c92366d29fb19cc28add3b5aabba1450e316ccea4" Dec 05 06:39:59 crc kubenswrapper[4742]: E1205 06:39:59.899753 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e73c1b0eeb9afe85908403c92366d29fb19cc28add3b5aabba1450e316ccea4\": container with ID starting with 7e73c1b0eeb9afe85908403c92366d29fb19cc28add3b5aabba1450e316ccea4 not found: ID does not exist" containerID="7e73c1b0eeb9afe85908403c92366d29fb19cc28add3b5aabba1450e316ccea4" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.899816 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e73c1b0eeb9afe85908403c92366d29fb19cc28add3b5aabba1450e316ccea4"} err="failed to get container status \"7e73c1b0eeb9afe85908403c92366d29fb19cc28add3b5aabba1450e316ccea4\": rpc error: code = NotFound desc = could not find 
container \"7e73c1b0eeb9afe85908403c92366d29fb19cc28add3b5aabba1450e316ccea4\": container with ID starting with 7e73c1b0eeb9afe85908403c92366d29fb19cc28add3b5aabba1450e316ccea4 not found: ID does not exist" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.899858 4742 scope.go:117] "RemoveContainer" containerID="473c1fc6c962233118a9aef3ceaf04298bc8e9460f85192b7f07b7dc56ec14c8" Dec 05 06:39:59 crc kubenswrapper[4742]: E1205 06:39:59.900466 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"473c1fc6c962233118a9aef3ceaf04298bc8e9460f85192b7f07b7dc56ec14c8\": container with ID starting with 473c1fc6c962233118a9aef3ceaf04298bc8e9460f85192b7f07b7dc56ec14c8 not found: ID does not exist" containerID="473c1fc6c962233118a9aef3ceaf04298bc8e9460f85192b7f07b7dc56ec14c8" Dec 05 06:39:59 crc kubenswrapper[4742]: I1205 06:39:59.900499 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"473c1fc6c962233118a9aef3ceaf04298bc8e9460f85192b7f07b7dc56ec14c8"} err="failed to get container status \"473c1fc6c962233118a9aef3ceaf04298bc8e9460f85192b7f07b7dc56ec14c8\": rpc error: code = NotFound desc = could not find container \"473c1fc6c962233118a9aef3ceaf04298bc8e9460f85192b7f07b7dc56ec14c8\": container with ID starting with 473c1fc6c962233118a9aef3ceaf04298bc8e9460f85192b7f07b7dc56ec14c8 not found: ID does not exist" Dec 05 06:40:00 crc kubenswrapper[4742]: I1205 06:40:00.397278 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b4aab07-75be-4124-805b-f7d5f5a5e6d2" path="/var/lib/kubelet/pods/1b4aab07-75be-4124-805b-f7d5f5a5e6d2/volumes" Dec 05 06:40:04 crc kubenswrapper[4742]: I1205 06:40:04.053766 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:40:04 crc kubenswrapper[4742]: I1205 06:40:04.054280 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:40:04 crc kubenswrapper[4742]: I1205 06:40:04.124587 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:40:04 crc kubenswrapper[4742]: I1205 06:40:04.901421 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:40:04 crc kubenswrapper[4742]: I1205 06:40:04.959968 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2md54"] Dec 05 06:40:06 crc kubenswrapper[4742]: I1205 06:40:06.871511 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2md54" podUID="645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" containerName="registry-server" containerID="cri-o://d545d17c148ff40ccde1832183ec916435ef9c09b864600ec4905a695542d9db" gracePeriod=2 Dec 05 06:40:07 crc kubenswrapper[4742]: I1205 06:40:07.884320 4742 generic.go:334] "Generic (PLEG): container finished" podID="645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" containerID="d545d17c148ff40ccde1832183ec916435ef9c09b864600ec4905a695542d9db" exitCode=0 Dec 05 06:40:07 crc kubenswrapper[4742]: I1205 06:40:07.884382 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2md54" event={"ID":"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0","Type":"ContainerDied","Data":"d545d17c148ff40ccde1832183ec916435ef9c09b864600ec4905a695542d9db"} 
Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.507328 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.661688 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-catalog-content\") pod \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\" (UID: \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\") " Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.661748 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzpch\" (UniqueName: \"kubernetes.io/projected/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-kube-api-access-jzpch\") pod \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\" (UID: \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\") " Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.661799 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-utilities\") pod \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\" (UID: \"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0\") " Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.663638 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-utilities" (OuterVolumeSpecName: "utilities") pod "645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" (UID: "645b74a5-9eeb-4a9f-8f4c-c95007fde3c0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.668109 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-kube-api-access-jzpch" (OuterVolumeSpecName: "kube-api-access-jzpch") pod "645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" (UID: "645b74a5-9eeb-4a9f-8f4c-c95007fde3c0"). InnerVolumeSpecName "kube-api-access-jzpch". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.763754 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzpch\" (UniqueName: \"kubernetes.io/projected/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-kube-api-access-jzpch\") on node \"crc\" DevicePath \"\"" Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.763808 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.808669 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" (UID: "645b74a5-9eeb-4a9f-8f4c-c95007fde3c0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.865148 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.896222 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2md54" event={"ID":"645b74a5-9eeb-4a9f-8f4c-c95007fde3c0","Type":"ContainerDied","Data":"2e4206ead1741eb001a273a5a7c7a1557dc39fb035a2ca9a18a8a0498d0b669f"} Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.896285 4742 scope.go:117] "RemoveContainer" containerID="d545d17c148ff40ccde1832183ec916435ef9c09b864600ec4905a695542d9db" Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.896329 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2md54" Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.934675 4742 scope.go:117] "RemoveContainer" containerID="862b138c8ad35afdacf0febed3522e3f0bbdedb7f7144f2aec1b65ab69244b55" Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.936925 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2md54"] Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.946883 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2md54"] Dec 05 06:40:08 crc kubenswrapper[4742]: I1205 06:40:08.954683 4742 scope.go:117] "RemoveContainer" containerID="c0df4eab234d399599db776fa36e3f29f54721ce55d07f5730242dd701fb2a93" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.395651 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" path="/var/lib/kubelet/pods/645b74a5-9eeb-4a9f-8f4c-c95007fde3c0/volumes" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.768960 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vqgrg"] Dec 05 06:40:10 crc kubenswrapper[4742]: E1205 06:40:10.770224 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" containerName="extract-content" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.770570 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" containerName="extract-content" Dec 05 06:40:10 crc kubenswrapper[4742]: E1205 06:40:10.770714 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b4aab07-75be-4124-805b-f7d5f5a5e6d2" containerName="extract-utilities" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.770828 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b4aab07-75be-4124-805b-f7d5f5a5e6d2" containerName="extract-utilities" Dec 05 06:40:10 crc kubenswrapper[4742]: E1205 06:40:10.771107 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b4aab07-75be-4124-805b-f7d5f5a5e6d2" containerName="extract-content" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.771345 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b4aab07-75be-4124-805b-f7d5f5a5e6d2" containerName="extract-content" Dec 05 06:40:10 crc kubenswrapper[4742]: E1205 06:40:10.771504 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" containerName="extract-utilities" Dec 05 06:40:10 
crc kubenswrapper[4742]: I1205 06:40:10.771642 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" containerName="extract-utilities" Dec 05 06:40:10 crc kubenswrapper[4742]: E1205 06:40:10.771785 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b4aab07-75be-4124-805b-f7d5f5a5e6d2" containerName="registry-server" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.771911 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b4aab07-75be-4124-805b-f7d5f5a5e6d2" containerName="registry-server" Dec 05 06:40:10 crc kubenswrapper[4742]: E1205 06:40:10.772128 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" containerName="registry-server" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.772322 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" containerName="registry-server" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.772798 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="645b74a5-9eeb-4a9f-8f4c-c95007fde3c0" containerName="registry-server" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.773019 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b4aab07-75be-4124-805b-f7d5f5a5e6d2" containerName="registry-server" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.777726 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.786235 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vqgrg"] Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.897783 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de08ec02-e9fa-408a-bfb0-677a4b7e654b-utilities\") pod \"community-operators-vqgrg\" (UID: \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\") " pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.898086 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggqbp\" (UniqueName: \"kubernetes.io/projected/de08ec02-e9fa-408a-bfb0-677a4b7e654b-kube-api-access-ggqbp\") pod \"community-operators-vqgrg\" (UID: \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\") " pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.898150 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de08ec02-e9fa-408a-bfb0-677a4b7e654b-catalog-content\") pod \"community-operators-vqgrg\" (UID: \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\") " pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.998970 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggqbp\" (UniqueName: \"kubernetes.io/projected/de08ec02-e9fa-408a-bfb0-677a4b7e654b-kube-api-access-ggqbp\") pod \"community-operators-vqgrg\" (UID: \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\") " pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.999031 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de08ec02-e9fa-408a-bfb0-677a4b7e654b-catalog-content\") pod \"community-operators-vqgrg\" (UID: \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\") " pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.999080 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de08ec02-e9fa-408a-bfb0-677a4b7e654b-utilities\") pod \"community-operators-vqgrg\" (UID: \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\") " pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.999541 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de08ec02-e9fa-408a-bfb0-677a4b7e654b-utilities\") pod \"community-operators-vqgrg\" (UID: \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\") " pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:10 crc kubenswrapper[4742]: I1205 06:40:10.999607 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de08ec02-e9fa-408a-bfb0-677a4b7e654b-catalog-content\") pod \"community-operators-vqgrg\" (UID: \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\") " pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:11 crc kubenswrapper[4742]: I1205 06:40:11.018043 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggqbp\" (UniqueName: \"kubernetes.io/projected/de08ec02-e9fa-408a-bfb0-677a4b7e654b-kube-api-access-ggqbp\") pod \"community-operators-vqgrg\" (UID: \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\") " pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:11 crc kubenswrapper[4742]: I1205 06:40:11.114365 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:11 crc kubenswrapper[4742]: I1205 06:40:11.690090 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vqgrg"] Dec 05 06:40:11 crc kubenswrapper[4742]: I1205 06:40:11.924318 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqgrg" event={"ID":"de08ec02-e9fa-408a-bfb0-677a4b7e654b","Type":"ContainerStarted","Data":"6cc5176c0304ecff91aa7f6d5cea800bfb5c45cf8f2870e62f25f3b8cf940e0f"} Dec 05 06:40:11 crc kubenswrapper[4742]: I1205 06:40:11.924389 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqgrg" event={"ID":"de08ec02-e9fa-408a-bfb0-677a4b7e654b","Type":"ContainerStarted","Data":"cfd9025c20d77eca4e65e7045faf37f7bd63ede1b36de9d258f367d450157211"} Dec 05 06:40:12 crc kubenswrapper[4742]: I1205 06:40:12.937760 4742 generic.go:334] "Generic (PLEG): container finished" podID="de08ec02-e9fa-408a-bfb0-677a4b7e654b" containerID="6cc5176c0304ecff91aa7f6d5cea800bfb5c45cf8f2870e62f25f3b8cf940e0f" exitCode=0 Dec 05 06:40:12 crc kubenswrapper[4742]: I1205 06:40:12.942828 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqgrg" event={"ID":"de08ec02-e9fa-408a-bfb0-677a4b7e654b","Type":"ContainerDied","Data":"6cc5176c0304ecff91aa7f6d5cea800bfb5c45cf8f2870e62f25f3b8cf940e0f"} Dec 05 06:40:13 crc kubenswrapper[4742]: I1205 06:40:13.949080 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqgrg" event={"ID":"de08ec02-e9fa-408a-bfb0-677a4b7e654b","Type":"ContainerStarted","Data":"561de6d7c77064e490cb9659231f094013c493a037a97edfb441e61ca27e47df"} Dec 05 06:40:14 crc kubenswrapper[4742]: I1205 06:40:14.961396 4742 generic.go:334] "Generic (PLEG): container finished" podID="de08ec02-e9fa-408a-bfb0-677a4b7e654b" containerID="561de6d7c77064e490cb9659231f094013c493a037a97edfb441e61ca27e47df" exitCode=0 Dec 05 06:40:14 crc kubenswrapper[4742]: I1205 06:40:14.961455 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqgrg" event={"ID":"de08ec02-e9fa-408a-bfb0-677a4b7e654b","Type":"ContainerDied","Data":"561de6d7c77064e490cb9659231f094013c493a037a97edfb441e61ca27e47df"} Dec 05 06:40:15 crc kubenswrapper[4742]: I1205 06:40:15.969647 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqgrg" event={"ID":"de08ec02-e9fa-408a-bfb0-677a4b7e654b","Type":"ContainerStarted","Data":"ee92c8eba917f1cda03b473c4645bda73e938482318393d5ff87d0b47a722e92"} Dec 05 06:40:15 crc kubenswrapper[4742]: I1205 06:40:15.994781 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vqgrg" podStartSLOduration=3.565218727 podStartE2EDuration="5.994751632s" podCreationTimestamp="2025-12-05 06:40:10 +0000 UTC" firstStartedPulling="2025-12-05 06:40:12.940622496 +0000 UTC m=+2888.852757588" lastFinishedPulling="2025-12-05 06:40:15.370155391 +0000 UTC m=+2891.282290493" observedRunningTime="2025-12-05 06:40:15.991435664 +0000 UTC m=+2891.903570726" watchObservedRunningTime="2025-12-05 06:40:15.994751632 +0000 UTC m=+2891.906886734" Dec 05 06:40:21 crc kubenswrapper[4742]: I1205 06:40:21.120934 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:21 crc 
kubenswrapper[4742]: I1205 06:40:21.122010 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:21 crc kubenswrapper[4742]: I1205 06:40:21.203855 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:22 crc kubenswrapper[4742]: I1205 06:40:22.096108 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:22 crc kubenswrapper[4742]: I1205 06:40:22.163775 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vqgrg"] Dec 05 06:40:24 crc kubenswrapper[4742]: I1205 06:40:24.045875 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vqgrg" podUID="de08ec02-e9fa-408a-bfb0-677a4b7e654b" containerName="registry-server" containerID="cri-o://ee92c8eba917f1cda03b473c4645bda73e938482318393d5ff87d0b47a722e92" gracePeriod=2 Dec 05 06:40:24 crc kubenswrapper[4742]: I1205 06:40:24.585922 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:24 crc kubenswrapper[4742]: I1205 06:40:24.641732 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de08ec02-e9fa-408a-bfb0-677a4b7e654b-catalog-content\") pod \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\" (UID: \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\") " Dec 05 06:40:24 crc kubenswrapper[4742]: I1205 06:40:24.641791 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de08ec02-e9fa-408a-bfb0-677a4b7e654b-utilities\") pod \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\" (UID: \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\") " Dec 05 06:40:24 crc kubenswrapper[4742]: I1205 06:40:24.641887 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ggqbp\" (UniqueName: \"kubernetes.io/projected/de08ec02-e9fa-408a-bfb0-677a4b7e654b-kube-api-access-ggqbp\") pod \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\" (UID: \"de08ec02-e9fa-408a-bfb0-677a4b7e654b\") " Dec 05 06:40:24 crc kubenswrapper[4742]: I1205 06:40:24.643125 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de08ec02-e9fa-408a-bfb0-677a4b7e654b-utilities" (OuterVolumeSpecName: "utilities") pod "de08ec02-e9fa-408a-bfb0-677a4b7e654b" (UID: "de08ec02-e9fa-408a-bfb0-677a4b7e654b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:40:24 crc kubenswrapper[4742]: I1205 06:40:24.650310 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de08ec02-e9fa-408a-bfb0-677a4b7e654b-kube-api-access-ggqbp" (OuterVolumeSpecName: "kube-api-access-ggqbp") pod "de08ec02-e9fa-408a-bfb0-677a4b7e654b" (UID: "de08ec02-e9fa-408a-bfb0-677a4b7e654b"). InnerVolumeSpecName "kube-api-access-ggqbp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:40:24 crc kubenswrapper[4742]: I1205 06:40:24.720887 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de08ec02-e9fa-408a-bfb0-677a4b7e654b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de08ec02-e9fa-408a-bfb0-677a4b7e654b" (UID: "de08ec02-e9fa-408a-bfb0-677a4b7e654b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:40:24 crc kubenswrapper[4742]: I1205 06:40:24.743452 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de08ec02-e9fa-408a-bfb0-677a4b7e654b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:40:24 crc kubenswrapper[4742]: I1205 06:40:24.743496 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de08ec02-e9fa-408a-bfb0-677a4b7e654b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:40:24 crc kubenswrapper[4742]: I1205 06:40:24.743517 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ggqbp\" (UniqueName: \"kubernetes.io/projected/de08ec02-e9fa-408a-bfb0-677a4b7e654b-kube-api-access-ggqbp\") on node \"crc\" DevicePath \"\"" Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.056592 4742 generic.go:334] "Generic (PLEG): container finished" podID="de08ec02-e9fa-408a-bfb0-677a4b7e654b" containerID="ee92c8eba917f1cda03b473c4645bda73e938482318393d5ff87d0b47a722e92" exitCode=0 Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.056655 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqgrg" event={"ID":"de08ec02-e9fa-408a-bfb0-677a4b7e654b","Type":"ContainerDied","Data":"ee92c8eba917f1cda03b473c4645bda73e938482318393d5ff87d0b47a722e92"} Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.056665 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vqgrg" Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.056707 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqgrg" event={"ID":"de08ec02-e9fa-408a-bfb0-677a4b7e654b","Type":"ContainerDied","Data":"cfd9025c20d77eca4e65e7045faf37f7bd63ede1b36de9d258f367d450157211"} Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.056740 4742 scope.go:117] "RemoveContainer" containerID="ee92c8eba917f1cda03b473c4645bda73e938482318393d5ff87d0b47a722e92" Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.087650 4742 scope.go:117] "RemoveContainer" containerID="561de6d7c77064e490cb9659231f094013c493a037a97edfb441e61ca27e47df" Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.096188 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vqgrg"] Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.102821 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vqgrg"] Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.117004 4742 scope.go:117] "RemoveContainer" containerID="6cc5176c0304ecff91aa7f6d5cea800bfb5c45cf8f2870e62f25f3b8cf940e0f" Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.147419 4742 scope.go:117] "RemoveContainer" containerID="ee92c8eba917f1cda03b473c4645bda73e938482318393d5ff87d0b47a722e92" Dec 05 06:40:25 crc kubenswrapper[4742]: E1205 06:40:25.147864 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee92c8eba917f1cda03b473c4645bda73e938482318393d5ff87d0b47a722e92\": container with ID starting with ee92c8eba917f1cda03b473c4645bda73e938482318393d5ff87d0b47a722e92 not found: ID does not exist" containerID="ee92c8eba917f1cda03b473c4645bda73e938482318393d5ff87d0b47a722e92" Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.147907 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee92c8eba917f1cda03b473c4645bda73e938482318393d5ff87d0b47a722e92"} err="failed to get container status \"ee92c8eba917f1cda03b473c4645bda73e938482318393d5ff87d0b47a722e92\": rpc error: code = NotFound desc = could not find container \"ee92c8eba917f1cda03b473c4645bda73e938482318393d5ff87d0b47a722e92\": container with ID starting with ee92c8eba917f1cda03b473c4645bda73e938482318393d5ff87d0b47a722e92 not found: ID does not exist" Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.147938 4742 scope.go:117] "RemoveContainer" containerID="561de6d7c77064e490cb9659231f094013c493a037a97edfb441e61ca27e47df" Dec 05 06:40:25 crc kubenswrapper[4742]: E1205 06:40:25.148219 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"561de6d7c77064e490cb9659231f094013c493a037a97edfb441e61ca27e47df\": container with ID starting with 561de6d7c77064e490cb9659231f094013c493a037a97edfb441e61ca27e47df not found: ID does not exist" containerID="561de6d7c77064e490cb9659231f094013c493a037a97edfb441e61ca27e47df" Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.148246 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"561de6d7c77064e490cb9659231f094013c493a037a97edfb441e61ca27e47df"} err="failed to get container status \"561de6d7c77064e490cb9659231f094013c493a037a97edfb441e61ca27e47df\": rpc error: code = NotFound desc = could not find 
container \"561de6d7c77064e490cb9659231f094013c493a037a97edfb441e61ca27e47df\": container with ID starting with 561de6d7c77064e490cb9659231f094013c493a037a97edfb441e61ca27e47df not found: ID does not exist" Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.148262 4742 scope.go:117] "RemoveContainer" containerID="6cc5176c0304ecff91aa7f6d5cea800bfb5c45cf8f2870e62f25f3b8cf940e0f" Dec 05 06:40:25 crc kubenswrapper[4742]: E1205 06:40:25.148462 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cc5176c0304ecff91aa7f6d5cea800bfb5c45cf8f2870e62f25f3b8cf940e0f\": container with ID starting with 6cc5176c0304ecff91aa7f6d5cea800bfb5c45cf8f2870e62f25f3b8cf940e0f not found: ID does not exist" containerID="6cc5176c0304ecff91aa7f6d5cea800bfb5c45cf8f2870e62f25f3b8cf940e0f" Dec 05 06:40:25 crc kubenswrapper[4742]: I1205 06:40:25.148497 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cc5176c0304ecff91aa7f6d5cea800bfb5c45cf8f2870e62f25f3b8cf940e0f"} err="failed to get container status \"6cc5176c0304ecff91aa7f6d5cea800bfb5c45cf8f2870e62f25f3b8cf940e0f\": rpc error: code = NotFound desc = could not find container \"6cc5176c0304ecff91aa7f6d5cea800bfb5c45cf8f2870e62f25f3b8cf940e0f\": container with ID starting with 6cc5176c0304ecff91aa7f6d5cea800bfb5c45cf8f2870e62f25f3b8cf940e0f not found: ID does not exist" Dec 05 06:40:26 crc kubenswrapper[4742]: I1205 06:40:26.398244 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de08ec02-e9fa-408a-bfb0-677a4b7e654b" path="/var/lib/kubelet/pods/de08ec02-e9fa-408a-bfb0-677a4b7e654b/volumes" Dec 05 06:41:16 crc kubenswrapper[4742]: I1205 06:41:16.671942 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:41:16 crc kubenswrapper[4742]: I1205 06:41:16.672889 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:41:46 crc kubenswrapper[4742]: I1205 06:41:46.671343 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:41:46 crc kubenswrapper[4742]: I1205 06:41:46.673621 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:42:16 crc kubenswrapper[4742]: I1205 06:42:16.671164 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 
06:42:16 crc kubenswrapper[4742]: I1205 06:42:16.673176 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:42:16 crc kubenswrapper[4742]: I1205 06:42:16.673387 4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 06:42:16 crc kubenswrapper[4742]: I1205 06:42:16.674395 4742 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:42:16 crc kubenswrapper[4742]: I1205 06:42:16.674718 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" gracePeriod=600 Dec 05 06:42:16 crc kubenswrapper[4742]: E1205 06:42:16.806256 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:42:17 crc kubenswrapper[4742]: I1205 06:42:17.064383 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" exitCode=0 Dec 05 06:42:17 crc kubenswrapper[4742]: I1205 06:42:17.064447 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479"} Dec 05 06:42:17 crc kubenswrapper[4742]: I1205 06:42:17.064502 4742 scope.go:117] "RemoveContainer" containerID="11632c4bddc650d6853791ae73228e723ef7db19eea7e97ff0f84f7b29e2f3c8" Dec 05 06:42:17 crc kubenswrapper[4742]: I1205 06:42:17.065325 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:42:17 crc kubenswrapper[4742]: E1205 06:42:17.065749 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:42:30 crc kubenswrapper[4742]: I1205 06:42:30.384241 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:42:30 crc 
kubenswrapper[4742]: E1205 06:42:30.385336 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:42:43 crc kubenswrapper[4742]: I1205 06:42:43.383784 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:42:43 crc kubenswrapper[4742]: E1205 06:42:43.384699 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:42:54 crc kubenswrapper[4742]: I1205 06:42:54.390493 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:42:54 crc kubenswrapper[4742]: E1205 06:42:54.391634 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:43:07 crc kubenswrapper[4742]: I1205 06:43:07.383011 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:43:07 crc kubenswrapper[4742]: E1205 06:43:07.383919 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:43:18 crc kubenswrapper[4742]: I1205 06:43:18.383311 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:43:18 crc kubenswrapper[4742]: E1205 06:43:18.384555 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:43:29 crc kubenswrapper[4742]: I1205 06:43:29.383247 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:43:29 crc kubenswrapper[4742]: E1205 06:43:29.383947 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:43:43 crc kubenswrapper[4742]: I1205 06:43:43.383705 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:43:43 crc kubenswrapper[4742]: E1205 06:43:43.384813 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.153548 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fgl5v"] Dec 05 06:43:49 crc kubenswrapper[4742]: E1205 06:43:49.154779 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de08ec02-e9fa-408a-bfb0-677a4b7e654b" containerName="extract-utilities" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.154816 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="de08ec02-e9fa-408a-bfb0-677a4b7e654b" containerName="extract-utilities" Dec 05 06:43:49 crc kubenswrapper[4742]: E1205 06:43:49.154859 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de08ec02-e9fa-408a-bfb0-677a4b7e654b" containerName="extract-content" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.154878 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="de08ec02-e9fa-408a-bfb0-677a4b7e654b" containerName="extract-content" Dec 05 06:43:49 crc kubenswrapper[4742]: E1205 06:43:49.154912 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de08ec02-e9fa-408a-bfb0-677a4b7e654b" containerName="registry-server" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.154931 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="de08ec02-e9fa-408a-bfb0-677a4b7e654b" containerName="registry-server" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.155386 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="de08ec02-e9fa-408a-bfb0-677a4b7e654b" containerName="registry-server" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.159400 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.173803 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fgl5v"] Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.313657 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61982040-6c94-428a-a0c3-9ad2d164c73a-catalog-content\") pod \"certified-operators-fgl5v\" (UID: \"61982040-6c94-428a-a0c3-9ad2d164c73a\") " pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.313710 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61982040-6c94-428a-a0c3-9ad2d164c73a-utilities\") pod \"certified-operators-fgl5v\" (UID: \"61982040-6c94-428a-a0c3-9ad2d164c73a\") " pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.313755 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwkvl\" (UniqueName: \"kubernetes.io/projected/61982040-6c94-428a-a0c3-9ad2d164c73a-kube-api-access-hwkvl\") pod \"certified-operators-fgl5v\" (UID: \"61982040-6c94-428a-a0c3-9ad2d164c73a\") " pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.415449 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwkvl\" (UniqueName: \"kubernetes.io/projected/61982040-6c94-428a-a0c3-9ad2d164c73a-kube-api-access-hwkvl\") pod \"certified-operators-fgl5v\" (UID: \"61982040-6c94-428a-a0c3-9ad2d164c73a\") " pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.415584 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61982040-6c94-428a-a0c3-9ad2d164c73a-catalog-content\") pod \"certified-operators-fgl5v\" (UID: \"61982040-6c94-428a-a0c3-9ad2d164c73a\") " pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.415618 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61982040-6c94-428a-a0c3-9ad2d164c73a-utilities\") pod \"certified-operators-fgl5v\" (UID: \"61982040-6c94-428a-a0c3-9ad2d164c73a\") " pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.416205 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61982040-6c94-428a-a0c3-9ad2d164c73a-utilities\") pod \"certified-operators-fgl5v\" (UID: \"61982040-6c94-428a-a0c3-9ad2d164c73a\") " pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.416594 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61982040-6c94-428a-a0c3-9ad2d164c73a-catalog-content\") pod \"certified-operators-fgl5v\" (UID: \"61982040-6c94-428a-a0c3-9ad2d164c73a\") " pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.443295 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hwkvl\" (UniqueName: \"kubernetes.io/projected/61982040-6c94-428a-a0c3-9ad2d164c73a-kube-api-access-hwkvl\") pod \"certified-operators-fgl5v\" (UID: \"61982040-6c94-428a-a0c3-9ad2d164c73a\") " pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.511703 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:43:49 crc kubenswrapper[4742]: I1205 06:43:49.965532 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fgl5v"] Dec 05 06:43:50 crc kubenswrapper[4742]: I1205 06:43:50.967265 4742 generic.go:334] "Generic (PLEG): container finished" podID="61982040-6c94-428a-a0c3-9ad2d164c73a" containerID="0a164fbbdccc5c3b9664b4eb5485f62b153e0919f07b12f9932e2b4fddcbc079" exitCode=0 Dec 05 06:43:50 crc kubenswrapper[4742]: I1205 06:43:50.967586 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fgl5v" event={"ID":"61982040-6c94-428a-a0c3-9ad2d164c73a","Type":"ContainerDied","Data":"0a164fbbdccc5c3b9664b4eb5485f62b153e0919f07b12f9932e2b4fddcbc079"} Dec 05 06:43:50 crc kubenswrapper[4742]: I1205 06:43:50.967687 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fgl5v" event={"ID":"61982040-6c94-428a-a0c3-9ad2d164c73a","Type":"ContainerStarted","Data":"b218201c5b21f58da3c3e095d4779be624069ce33f1caf83f3c335ab79536dfb"} Dec 05 06:43:52 crc kubenswrapper[4742]: I1205 06:43:52.993264 4742 generic.go:334] "Generic (PLEG): container finished" podID="61982040-6c94-428a-a0c3-9ad2d164c73a" containerID="01639c5d3126a80c9f74ac15cfd13c79a1b6255f60d4362bc692291d35cca6ff" exitCode=0 Dec 05 06:43:52 crc kubenswrapper[4742]: I1205 06:43:52.993368 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fgl5v" event={"ID":"61982040-6c94-428a-a0c3-9ad2d164c73a","Type":"ContainerDied","Data":"01639c5d3126a80c9f74ac15cfd13c79a1b6255f60d4362bc692291d35cca6ff"} Dec 05 06:43:54 crc kubenswrapper[4742]: I1205 06:43:54.011663 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fgl5v" event={"ID":"61982040-6c94-428a-a0c3-9ad2d164c73a","Type":"ContainerStarted","Data":"54a2fb08ee2c7fd72a6b2a874b905f23324bacfdf30082286d4dfc927a51e1df"} Dec 05 06:43:54 crc kubenswrapper[4742]: I1205 06:43:54.052841 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fgl5v" podStartSLOduration=2.6037511269999998 podStartE2EDuration="5.052807844s" podCreationTimestamp="2025-12-05 06:43:49 +0000 UTC" firstStartedPulling="2025-12-05 06:43:50.97022322 +0000 UTC m=+3106.882358332" lastFinishedPulling="2025-12-05 06:43:53.419279957 +0000 UTC m=+3109.331415049" observedRunningTime="2025-12-05 06:43:54.039281086 +0000 UTC m=+3109.951416208" watchObservedRunningTime="2025-12-05 06:43:54.052807844 +0000 UTC m=+3109.964942986" Dec 05 06:43:57 crc kubenswrapper[4742]: I1205 06:43:57.383341 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:43:57 crc kubenswrapper[4742]: E1205 06:43:57.383879 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:43:59 crc kubenswrapper[4742]: I1205 06:43:59.513474 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:43:59 crc kubenswrapper[4742]: I1205 06:43:59.513556 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:43:59 crc kubenswrapper[4742]: I1205 06:43:59.585848 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:44:00 crc kubenswrapper[4742]: I1205 06:44:00.123982 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:44:00 crc kubenswrapper[4742]: I1205 06:44:00.187020 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fgl5v"] Dec 05 06:44:02 crc kubenswrapper[4742]: I1205 06:44:02.081789 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fgl5v" podUID="61982040-6c94-428a-a0c3-9ad2d164c73a" containerName="registry-server" containerID="cri-o://54a2fb08ee2c7fd72a6b2a874b905f23324bacfdf30082286d4dfc927a51e1df" gracePeriod=2 Dec 05 06:44:03 crc kubenswrapper[4742]: I1205 06:44:03.089850 4742 generic.go:334] "Generic (PLEG): container finished" podID="61982040-6c94-428a-a0c3-9ad2d164c73a" containerID="54a2fb08ee2c7fd72a6b2a874b905f23324bacfdf30082286d4dfc927a51e1df" exitCode=0 Dec 05 06:44:03 crc kubenswrapper[4742]: I1205 06:44:03.089896 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fgl5v" event={"ID":"61982040-6c94-428a-a0c3-9ad2d164c73a","Type":"ContainerDied","Data":"54a2fb08ee2c7fd72a6b2a874b905f23324bacfdf30082286d4dfc927a51e1df"} Dec 05 06:44:03 crc kubenswrapper[4742]: I1205 06:44:03.202896 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:44:03 crc kubenswrapper[4742]: I1205 06:44:03.306568 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwkvl\" (UniqueName: \"kubernetes.io/projected/61982040-6c94-428a-a0c3-9ad2d164c73a-kube-api-access-hwkvl\") pod \"61982040-6c94-428a-a0c3-9ad2d164c73a\" (UID: \"61982040-6c94-428a-a0c3-9ad2d164c73a\") " Dec 05 06:44:03 crc kubenswrapper[4742]: I1205 06:44:03.306644 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61982040-6c94-428a-a0c3-9ad2d164c73a-catalog-content\") pod \"61982040-6c94-428a-a0c3-9ad2d164c73a\" (UID: \"61982040-6c94-428a-a0c3-9ad2d164c73a\") " Dec 05 06:44:03 crc kubenswrapper[4742]: I1205 06:44:03.306736 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61982040-6c94-428a-a0c3-9ad2d164c73a-utilities\") pod \"61982040-6c94-428a-a0c3-9ad2d164c73a\" (UID: \"61982040-6c94-428a-a0c3-9ad2d164c73a\") " Dec 05 06:44:03 crc kubenswrapper[4742]: I1205 06:44:03.307788 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61982040-6c94-428a-a0c3-9ad2d164c73a-utilities" (OuterVolumeSpecName: "utilities") pod "61982040-6c94-428a-a0c3-9ad2d164c73a" (UID: "61982040-6c94-428a-a0c3-9ad2d164c73a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:44:03 crc kubenswrapper[4742]: I1205 06:44:03.313199 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61982040-6c94-428a-a0c3-9ad2d164c73a-kube-api-access-hwkvl" (OuterVolumeSpecName: "kube-api-access-hwkvl") pod "61982040-6c94-428a-a0c3-9ad2d164c73a" (UID: "61982040-6c94-428a-a0c3-9ad2d164c73a"). InnerVolumeSpecName "kube-api-access-hwkvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:44:03 crc kubenswrapper[4742]: I1205 06:44:03.367872 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61982040-6c94-428a-a0c3-9ad2d164c73a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "61982040-6c94-428a-a0c3-9ad2d164c73a" (UID: "61982040-6c94-428a-a0c3-9ad2d164c73a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:44:03 crc kubenswrapper[4742]: I1205 06:44:03.407780 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61982040-6c94-428a-a0c3-9ad2d164c73a-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:44:03 crc kubenswrapper[4742]: I1205 06:44:03.407808 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwkvl\" (UniqueName: \"kubernetes.io/projected/61982040-6c94-428a-a0c3-9ad2d164c73a-kube-api-access-hwkvl\") on node \"crc\" DevicePath \"\"" Dec 05 06:44:03 crc kubenswrapper[4742]: I1205 06:44:03.407816 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61982040-6c94-428a-a0c3-9ad2d164c73a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:44:04 crc kubenswrapper[4742]: I1205 06:44:04.105717 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fgl5v" event={"ID":"61982040-6c94-428a-a0c3-9ad2d164c73a","Type":"ContainerDied","Data":"b218201c5b21f58da3c3e095d4779be624069ce33f1caf83f3c335ab79536dfb"} Dec 05 06:44:04 crc kubenswrapper[4742]: I1205 06:44:04.105784 4742 scope.go:117] "RemoveContainer" containerID="54a2fb08ee2c7fd72a6b2a874b905f23324bacfdf30082286d4dfc927a51e1df" Dec 05 06:44:04 crc kubenswrapper[4742]: I1205 06:44:04.105829 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fgl5v" Dec 05 06:44:04 crc kubenswrapper[4742]: I1205 06:44:04.143860 4742 scope.go:117] "RemoveContainer" containerID="01639c5d3126a80c9f74ac15cfd13c79a1b6255f60d4362bc692291d35cca6ff" Dec 05 06:44:04 crc kubenswrapper[4742]: I1205 06:44:04.177319 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fgl5v"] Dec 05 06:44:04 crc kubenswrapper[4742]: I1205 06:44:04.180567 4742 scope.go:117] "RemoveContainer" containerID="0a164fbbdccc5c3b9664b4eb5485f62b153e0919f07b12f9932e2b4fddcbc079" Dec 05 06:44:04 crc kubenswrapper[4742]: I1205 06:44:04.186807 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fgl5v"] Dec 05 06:44:04 crc kubenswrapper[4742]: I1205 06:44:04.401484 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61982040-6c94-428a-a0c3-9ad2d164c73a" path="/var/lib/kubelet/pods/61982040-6c94-428a-a0c3-9ad2d164c73a/volumes" Dec 05 06:44:09 crc kubenswrapper[4742]: I1205 06:44:09.383370 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:44:09 crc kubenswrapper[4742]: E1205 06:44:09.384266 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:44:24 crc kubenswrapper[4742]: I1205 06:44:24.385886 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:44:24 crc kubenswrapper[4742]: E1205 06:44:24.386639 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:44:37 crc kubenswrapper[4742]: I1205 06:44:37.383686 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:44:37 crc kubenswrapper[4742]: E1205 06:44:37.384750 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:44:51 crc kubenswrapper[4742]: I1205 06:44:51.383090 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:44:51 crc kubenswrapper[4742]: E1205 06:44:51.384077 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.170628 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm"] Dec 05 06:45:00 crc kubenswrapper[4742]: E1205 06:45:00.172926 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61982040-6c94-428a-a0c3-9ad2d164c73a" containerName="extract-utilities" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.173045 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="61982040-6c94-428a-a0c3-9ad2d164c73a" containerName="extract-utilities" Dec 05 06:45:00 crc kubenswrapper[4742]: E1205 06:45:00.173182 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61982040-6c94-428a-a0c3-9ad2d164c73a" containerName="registry-server" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.173292 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="61982040-6c94-428a-a0c3-9ad2d164c73a" containerName="registry-server" Dec 05 06:45:00 crc kubenswrapper[4742]: E1205 06:45:00.173386 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61982040-6c94-428a-a0c3-9ad2d164c73a" containerName="extract-content" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.173467 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="61982040-6c94-428a-a0c3-9ad2d164c73a" containerName="extract-content" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.173741 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="61982040-6c94-428a-a0c3-9ad2d164c73a" containerName="registry-server" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.174443 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.177640 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.179396 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.181003 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm"] Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.263808 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdqmf\" (UniqueName: \"kubernetes.io/projected/3ff83422-4343-429b-9512-df5fc16b4ae6-kube-api-access-vdqmf\") pod \"collect-profiles-29415285-b8ttm\" (UID: \"3ff83422-4343-429b-9512-df5fc16b4ae6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.263889 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ff83422-4343-429b-9512-df5fc16b4ae6-secret-volume\") pod \"collect-profiles-29415285-b8ttm\" (UID: \"3ff83422-4343-429b-9512-df5fc16b4ae6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.263975 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ff83422-4343-429b-9512-df5fc16b4ae6-config-volume\") pod \"collect-profiles-29415285-b8ttm\" (UID: \"3ff83422-4343-429b-9512-df5fc16b4ae6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.365071 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ff83422-4343-429b-9512-df5fc16b4ae6-config-volume\") pod \"collect-profiles-29415285-b8ttm\" (UID: \"3ff83422-4343-429b-9512-df5fc16b4ae6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.365163 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdqmf\" (UniqueName: \"kubernetes.io/projected/3ff83422-4343-429b-9512-df5fc16b4ae6-kube-api-access-vdqmf\") pod \"collect-profiles-29415285-b8ttm\" (UID: \"3ff83422-4343-429b-9512-df5fc16b4ae6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.365212 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ff83422-4343-429b-9512-df5fc16b4ae6-secret-volume\") pod \"collect-profiles-29415285-b8ttm\" (UID: \"3ff83422-4343-429b-9512-df5fc16b4ae6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.366992 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ff83422-4343-429b-9512-df5fc16b4ae6-config-volume\") pod 
\"collect-profiles-29415285-b8ttm\" (UID: \"3ff83422-4343-429b-9512-df5fc16b4ae6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.371037 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ff83422-4343-429b-9512-df5fc16b4ae6-secret-volume\") pod \"collect-profiles-29415285-b8ttm\" (UID: \"3ff83422-4343-429b-9512-df5fc16b4ae6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.399085 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdqmf\" (UniqueName: \"kubernetes.io/projected/3ff83422-4343-429b-9512-df5fc16b4ae6-kube-api-access-vdqmf\") pod \"collect-profiles-29415285-b8ttm\" (UID: \"3ff83422-4343-429b-9512-df5fc16b4ae6\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" Dec 05 06:45:00 crc kubenswrapper[4742]: I1205 06:45:00.499279 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" Dec 05 06:45:01 crc kubenswrapper[4742]: W1205 06:45:01.001785 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ff83422_4343_429b_9512_df5fc16b4ae6.slice/crio-c90706c4da6011e4a214210d2ae71f92dc5c09f8d351ec01269aa93d6966e6cd WatchSource:0}: Error finding container c90706c4da6011e4a214210d2ae71f92dc5c09f8d351ec01269aa93d6966e6cd: Status 404 returned error can't find the container with id c90706c4da6011e4a214210d2ae71f92dc5c09f8d351ec01269aa93d6966e6cd Dec 05 06:45:01 crc kubenswrapper[4742]: I1205 06:45:01.005017 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm"] Dec 05 06:45:01 crc kubenswrapper[4742]: I1205 06:45:01.681792 4742 generic.go:334] "Generic (PLEG): container finished" podID="3ff83422-4343-429b-9512-df5fc16b4ae6" containerID="5d1fb5c4bbfc1ed04b65147e5f6405b909092c4d787da6939279a2030d20d545" exitCode=0 Dec 05 06:45:01 crc kubenswrapper[4742]: I1205 06:45:01.681851 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" event={"ID":"3ff83422-4343-429b-9512-df5fc16b4ae6","Type":"ContainerDied","Data":"5d1fb5c4bbfc1ed04b65147e5f6405b909092c4d787da6939279a2030d20d545"} Dec 05 06:45:01 crc kubenswrapper[4742]: I1205 06:45:01.682241 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" event={"ID":"3ff83422-4343-429b-9512-df5fc16b4ae6","Type":"ContainerStarted","Data":"c90706c4da6011e4a214210d2ae71f92dc5c09f8d351ec01269aa93d6966e6cd"} Dec 05 06:45:03 crc kubenswrapper[4742]: I1205 06:45:03.090396 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" Dec 05 06:45:03 crc kubenswrapper[4742]: I1205 06:45:03.207691 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ff83422-4343-429b-9512-df5fc16b4ae6-config-volume\") pod \"3ff83422-4343-429b-9512-df5fc16b4ae6\" (UID: \"3ff83422-4343-429b-9512-df5fc16b4ae6\") " Dec 05 06:45:03 crc kubenswrapper[4742]: I1205 06:45:03.207790 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdqmf\" (UniqueName: \"kubernetes.io/projected/3ff83422-4343-429b-9512-df5fc16b4ae6-kube-api-access-vdqmf\") pod \"3ff83422-4343-429b-9512-df5fc16b4ae6\" (UID: \"3ff83422-4343-429b-9512-df5fc16b4ae6\") " Dec 05 06:45:03 crc kubenswrapper[4742]: I1205 06:45:03.207909 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ff83422-4343-429b-9512-df5fc16b4ae6-secret-volume\") pod \"3ff83422-4343-429b-9512-df5fc16b4ae6\" (UID: \"3ff83422-4343-429b-9512-df5fc16b4ae6\") " Dec 05 06:45:03 crc kubenswrapper[4742]: I1205 06:45:03.208734 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ff83422-4343-429b-9512-df5fc16b4ae6-config-volume" (OuterVolumeSpecName: "config-volume") pod "3ff83422-4343-429b-9512-df5fc16b4ae6" (UID: "3ff83422-4343-429b-9512-df5fc16b4ae6"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 06:45:03 crc kubenswrapper[4742]: I1205 06:45:03.213008 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ff83422-4343-429b-9512-df5fc16b4ae6-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3ff83422-4343-429b-9512-df5fc16b4ae6" (UID: "3ff83422-4343-429b-9512-df5fc16b4ae6"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 06:45:03 crc kubenswrapper[4742]: I1205 06:45:03.214262 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ff83422-4343-429b-9512-df5fc16b4ae6-kube-api-access-vdqmf" (OuterVolumeSpecName: "kube-api-access-vdqmf") pod "3ff83422-4343-429b-9512-df5fc16b4ae6" (UID: "3ff83422-4343-429b-9512-df5fc16b4ae6"). InnerVolumeSpecName "kube-api-access-vdqmf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:45:03 crc kubenswrapper[4742]: I1205 06:45:03.312945 4742 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3ff83422-4343-429b-9512-df5fc16b4ae6-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:45:03 crc kubenswrapper[4742]: I1205 06:45:03.313015 4742 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3ff83422-4343-429b-9512-df5fc16b4ae6-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 06:45:03 crc kubenswrapper[4742]: I1205 06:45:03.313037 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdqmf\" (UniqueName: \"kubernetes.io/projected/3ff83422-4343-429b-9512-df5fc16b4ae6-kube-api-access-vdqmf\") on node \"crc\" DevicePath \"\"" Dec 05 06:45:03 crc kubenswrapper[4742]: I1205 06:45:03.714102 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" event={"ID":"3ff83422-4343-429b-9512-df5fc16b4ae6","Type":"ContainerDied","Data":"c90706c4da6011e4a214210d2ae71f92dc5c09f8d351ec01269aa93d6966e6cd"} Dec 05 06:45:03 crc kubenswrapper[4742]: I1205 06:45:03.714188 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c90706c4da6011e4a214210d2ae71f92dc5c09f8d351ec01269aa93d6966e6cd" Dec 05 06:45:03 crc kubenswrapper[4742]: I1205 06:45:03.714239 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415285-b8ttm" Dec 05 06:45:04 crc kubenswrapper[4742]: I1205 06:45:04.186946 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw"] Dec 05 06:45:04 crc kubenswrapper[4742]: I1205 06:45:04.193305 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415240-bmlpw"] Dec 05 06:45:04 crc kubenswrapper[4742]: I1205 06:45:04.393169 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa52a9ca-a479-458f-a6f6-073376f06461" path="/var/lib/kubelet/pods/aa52a9ca-a479-458f-a6f6-073376f06461/volumes" Dec 05 06:45:05 crc kubenswrapper[4742]: I1205 06:45:05.383699 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:45:05 crc kubenswrapper[4742]: E1205 06:45:05.384184 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:45:13 crc kubenswrapper[4742]: I1205 06:45:13.103827 4742 scope.go:117] "RemoveContainer" containerID="abc38135b9aaf87a7d4f17967c3f97f9ab2929fa85279e4896a5b1a6507ed98e" Dec 05 06:45:17 crc kubenswrapper[4742]: I1205 06:45:17.383446 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:45:17 crc kubenswrapper[4742]: E1205 06:45:17.384147 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:45:32 crc kubenswrapper[4742]: I1205 06:45:32.383757 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:45:32 crc kubenswrapper[4742]: E1205 06:45:32.384896 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:45:44 crc kubenswrapper[4742]: I1205 06:45:44.390734 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:45:44 crc kubenswrapper[4742]: E1205 06:45:44.391669 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:45:56 crc kubenswrapper[4742]: I1205 06:45:56.383354 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:45:56 crc kubenswrapper[4742]: E1205 06:45:56.384542 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:46:07 crc kubenswrapper[4742]: I1205 06:46:07.382612 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:46:07 crc kubenswrapper[4742]: E1205 06:46:07.383473 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:46:22 crc kubenswrapper[4742]: I1205 06:46:22.382336 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:46:22 crc kubenswrapper[4742]: E1205 06:46:22.383095 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:46:34 crc kubenswrapper[4742]: I1205 06:46:34.390263 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:46:34 crc kubenswrapper[4742]: E1205 06:46:34.391274 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:46:47 crc kubenswrapper[4742]: I1205 06:46:47.383222 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:46:47 crc kubenswrapper[4742]: E1205 06:46:47.384230 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:46:59 crc kubenswrapper[4742]: I1205 06:46:59.384594 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:46:59 crc kubenswrapper[4742]: E1205 06:46:59.386299 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:47:12 crc kubenswrapper[4742]: I1205 06:47:12.405207 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:47:12 crc kubenswrapper[4742]: E1205 06:47:12.406347 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:47:24 crc kubenswrapper[4742]: I1205 06:47:24.391297 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:47:25 crc kubenswrapper[4742]: I1205 06:47:25.025006 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"85acd9b237e1245fb3a5d2729d4cd27ec43fd8523e82bf765425c487d144b99d"} Dec 05 06:49:46 crc kubenswrapper[4742]: I1205 06:49:46.671951 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe 
status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:49:46 crc kubenswrapper[4742]: I1205 06:49:46.672840 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:50:16 crc kubenswrapper[4742]: I1205 06:50:16.671685 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:50:16 crc kubenswrapper[4742]: I1205 06:50:16.672976 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.450609 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tkddd"] Dec 05 06:50:32 crc kubenswrapper[4742]: E1205 06:50:32.451943 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ff83422-4343-429b-9512-df5fc16b4ae6" containerName="collect-profiles" Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.451976 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ff83422-4343-429b-9512-df5fc16b4ae6" containerName="collect-profiles" Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.452366 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ff83422-4343-429b-9512-df5fc16b4ae6" containerName="collect-profiles" Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.454546 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.472537 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkddd"] Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.581534 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1180ee25-a801-446c-98db-117861eddd21-utilities\") pod \"redhat-marketplace-tkddd\" (UID: \"1180ee25-a801-446c-98db-117861eddd21\") " pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.581942 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlxrp\" (UniqueName: \"kubernetes.io/projected/1180ee25-a801-446c-98db-117861eddd21-kube-api-access-mlxrp\") pod \"redhat-marketplace-tkddd\" (UID: \"1180ee25-a801-446c-98db-117861eddd21\") " pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.582007 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1180ee25-a801-446c-98db-117861eddd21-catalog-content\") pod \"redhat-marketplace-tkddd\" (UID: \"1180ee25-a801-446c-98db-117861eddd21\") " pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.683691 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlxrp\" (UniqueName: \"kubernetes.io/projected/1180ee25-a801-446c-98db-117861eddd21-kube-api-access-mlxrp\") pod \"redhat-marketplace-tkddd\" (UID: \"1180ee25-a801-446c-98db-117861eddd21\") " pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.683761 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1180ee25-a801-446c-98db-117861eddd21-catalog-content\") pod \"redhat-marketplace-tkddd\" (UID: \"1180ee25-a801-446c-98db-117861eddd21\") " pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.683794 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1180ee25-a801-446c-98db-117861eddd21-utilities\") pod \"redhat-marketplace-tkddd\" (UID: \"1180ee25-a801-446c-98db-117861eddd21\") " pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.684385 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1180ee25-a801-446c-98db-117861eddd21-utilities\") pod \"redhat-marketplace-tkddd\" (UID: \"1180ee25-a801-446c-98db-117861eddd21\") " pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.684524 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1180ee25-a801-446c-98db-117861eddd21-catalog-content\") pod \"redhat-marketplace-tkddd\" (UID: \"1180ee25-a801-446c-98db-117861eddd21\") " pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.714650 4742 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-mlxrp\" (UniqueName: \"kubernetes.io/projected/1180ee25-a801-446c-98db-117861eddd21-kube-api-access-mlxrp\") pod \"redhat-marketplace-tkddd\" (UID: \"1180ee25-a801-446c-98db-117861eddd21\") " pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:32 crc kubenswrapper[4742]: I1205 06:50:32.790167 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:33 crc kubenswrapper[4742]: I1205 06:50:33.045160 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkddd"] Dec 05 06:50:33 crc kubenswrapper[4742]: I1205 06:50:33.823151 4742 generic.go:334] "Generic (PLEG): container finished" podID="1180ee25-a801-446c-98db-117861eddd21" containerID="5ba64379699007687016832dc90360eadf8c303bc650920d6d1646ec9bc33070" exitCode=0 Dec 05 06:50:33 crc kubenswrapper[4742]: I1205 06:50:33.823233 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkddd" event={"ID":"1180ee25-a801-446c-98db-117861eddd21","Type":"ContainerDied","Data":"5ba64379699007687016832dc90360eadf8c303bc650920d6d1646ec9bc33070"} Dec 05 06:50:33 crc kubenswrapper[4742]: I1205 06:50:33.823570 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkddd" event={"ID":"1180ee25-a801-446c-98db-117861eddd21","Type":"ContainerStarted","Data":"3ffee6a77bd2d874ba8237c189e676533e6caf4442240142135abefb2ef1ee89"} Dec 05 06:50:33 crc kubenswrapper[4742]: I1205 06:50:33.829269 4742 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 06:50:34 crc kubenswrapper[4742]: I1205 06:50:34.836968 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkddd" event={"ID":"1180ee25-a801-446c-98db-117861eddd21","Type":"ContainerStarted","Data":"1dd12212301f73d9070bcafe0d9ed8b21168a04243311983dd18ecef5b0bd033"} Dec 05 06:50:35 crc kubenswrapper[4742]: I1205 06:50:35.845986 4742 generic.go:334] "Generic (PLEG): container finished" podID="1180ee25-a801-446c-98db-117861eddd21" containerID="1dd12212301f73d9070bcafe0d9ed8b21168a04243311983dd18ecef5b0bd033" exitCode=0 Dec 05 06:50:35 crc kubenswrapper[4742]: I1205 06:50:35.846028 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkddd" event={"ID":"1180ee25-a801-446c-98db-117861eddd21","Type":"ContainerDied","Data":"1dd12212301f73d9070bcafe0d9ed8b21168a04243311983dd18ecef5b0bd033"} Dec 05 06:50:36 crc kubenswrapper[4742]: I1205 06:50:36.858085 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkddd" event={"ID":"1180ee25-a801-446c-98db-117861eddd21","Type":"ContainerStarted","Data":"79c7c6d88dd090b193bfd5028b7346ac7a8b0cb058af81822a926f813b63cdc0"} Dec 05 06:50:36 crc kubenswrapper[4742]: I1205 06:50:36.888280 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tkddd" podStartSLOduration=2.4866144009999998 podStartE2EDuration="4.888232387s" podCreationTimestamp="2025-12-05 06:50:32 +0000 UTC" firstStartedPulling="2025-12-05 06:50:33.828728788 +0000 UTC m=+3509.740863890" lastFinishedPulling="2025-12-05 06:50:36.230346794 +0000 UTC m=+3512.142481876" observedRunningTime="2025-12-05 06:50:36.886677686 +0000 UTC m=+3512.798812798" watchObservedRunningTime="2025-12-05 06:50:36.888232387 +0000 UTC 
m=+3512.800367489" Dec 05 06:50:42 crc kubenswrapper[4742]: I1205 06:50:42.790986 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:42 crc kubenswrapper[4742]: I1205 06:50:42.791870 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:42 crc kubenswrapper[4742]: I1205 06:50:42.874624 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:42 crc kubenswrapper[4742]: I1205 06:50:42.976453 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:43 crc kubenswrapper[4742]: I1205 06:50:43.118458 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkddd"] Dec 05 06:50:44 crc kubenswrapper[4742]: I1205 06:50:44.933634 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tkddd" podUID="1180ee25-a801-446c-98db-117861eddd21" containerName="registry-server" containerID="cri-o://79c7c6d88dd090b193bfd5028b7346ac7a8b0cb058af81822a926f813b63cdc0" gracePeriod=2 Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.375471 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.499885 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1180ee25-a801-446c-98db-117861eddd21-catalog-content\") pod \"1180ee25-a801-446c-98db-117861eddd21\" (UID: \"1180ee25-a801-446c-98db-117861eddd21\") " Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.499942 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlxrp\" (UniqueName: \"kubernetes.io/projected/1180ee25-a801-446c-98db-117861eddd21-kube-api-access-mlxrp\") pod \"1180ee25-a801-446c-98db-117861eddd21\" (UID: \"1180ee25-a801-446c-98db-117861eddd21\") " Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.499985 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1180ee25-a801-446c-98db-117861eddd21-utilities\") pod \"1180ee25-a801-446c-98db-117861eddd21\" (UID: \"1180ee25-a801-446c-98db-117861eddd21\") " Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.501308 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1180ee25-a801-446c-98db-117861eddd21-utilities" (OuterVolumeSpecName: "utilities") pod "1180ee25-a801-446c-98db-117861eddd21" (UID: "1180ee25-a801-446c-98db-117861eddd21"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.504966 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1180ee25-a801-446c-98db-117861eddd21-kube-api-access-mlxrp" (OuterVolumeSpecName: "kube-api-access-mlxrp") pod "1180ee25-a801-446c-98db-117861eddd21" (UID: "1180ee25-a801-446c-98db-117861eddd21"). InnerVolumeSpecName "kube-api-access-mlxrp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.526718 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1180ee25-a801-446c-98db-117861eddd21-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1180ee25-a801-446c-98db-117861eddd21" (UID: "1180ee25-a801-446c-98db-117861eddd21"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.601835 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1180ee25-a801-446c-98db-117861eddd21-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.601883 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlxrp\" (UniqueName: \"kubernetes.io/projected/1180ee25-a801-446c-98db-117861eddd21-kube-api-access-mlxrp\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.601905 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1180ee25-a801-446c-98db-117861eddd21-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.944594 4742 generic.go:334] "Generic (PLEG): container finished" podID="1180ee25-a801-446c-98db-117861eddd21" containerID="79c7c6d88dd090b193bfd5028b7346ac7a8b0cb058af81822a926f813b63cdc0" exitCode=0 Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.944633 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkddd" event={"ID":"1180ee25-a801-446c-98db-117861eddd21","Type":"ContainerDied","Data":"79c7c6d88dd090b193bfd5028b7346ac7a8b0cb058af81822a926f813b63cdc0"} Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.944659 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tkddd" event={"ID":"1180ee25-a801-446c-98db-117861eddd21","Type":"ContainerDied","Data":"3ffee6a77bd2d874ba8237c189e676533e6caf4442240142135abefb2ef1ee89"} Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.944673 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tkddd" Dec 05 06:50:45 crc kubenswrapper[4742]: I1205 06:50:45.944696 4742 scope.go:117] "RemoveContainer" containerID="79c7c6d88dd090b193bfd5028b7346ac7a8b0cb058af81822a926f813b63cdc0" Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:45.999644 4742 scope.go:117] "RemoveContainer" containerID="1dd12212301f73d9070bcafe0d9ed8b21168a04243311983dd18ecef5b0bd033" Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.000784 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkddd"] Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.007255 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tkddd"] Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.026833 4742 scope.go:117] "RemoveContainer" containerID="5ba64379699007687016832dc90360eadf8c303bc650920d6d1646ec9bc33070" Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.074566 4742 scope.go:117] "RemoveContainer" containerID="79c7c6d88dd090b193bfd5028b7346ac7a8b0cb058af81822a926f813b63cdc0" Dec 05 06:50:46 crc kubenswrapper[4742]: E1205 06:50:46.075132 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79c7c6d88dd090b193bfd5028b7346ac7a8b0cb058af81822a926f813b63cdc0\": container with ID starting with 79c7c6d88dd090b193bfd5028b7346ac7a8b0cb058af81822a926f813b63cdc0 not found: ID does not exist" containerID="79c7c6d88dd090b193bfd5028b7346ac7a8b0cb058af81822a926f813b63cdc0" Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.075186 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79c7c6d88dd090b193bfd5028b7346ac7a8b0cb058af81822a926f813b63cdc0"} err="failed to get container status \"79c7c6d88dd090b193bfd5028b7346ac7a8b0cb058af81822a926f813b63cdc0\": rpc error: code = NotFound desc = could not find container \"79c7c6d88dd090b193bfd5028b7346ac7a8b0cb058af81822a926f813b63cdc0\": container with ID starting with 79c7c6d88dd090b193bfd5028b7346ac7a8b0cb058af81822a926f813b63cdc0 not found: ID does not exist" Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.075222 4742 scope.go:117] "RemoveContainer" containerID="1dd12212301f73d9070bcafe0d9ed8b21168a04243311983dd18ecef5b0bd033" Dec 05 06:50:46 crc kubenswrapper[4742]: E1205 06:50:46.075698 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1dd12212301f73d9070bcafe0d9ed8b21168a04243311983dd18ecef5b0bd033\": container with ID starting with 1dd12212301f73d9070bcafe0d9ed8b21168a04243311983dd18ecef5b0bd033 not found: ID does not exist" containerID="1dd12212301f73d9070bcafe0d9ed8b21168a04243311983dd18ecef5b0bd033" Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.075739 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1dd12212301f73d9070bcafe0d9ed8b21168a04243311983dd18ecef5b0bd033"} err="failed to get container status \"1dd12212301f73d9070bcafe0d9ed8b21168a04243311983dd18ecef5b0bd033\": rpc error: code = NotFound desc = could not find container \"1dd12212301f73d9070bcafe0d9ed8b21168a04243311983dd18ecef5b0bd033\": container with ID starting with 1dd12212301f73d9070bcafe0d9ed8b21168a04243311983dd18ecef5b0bd033 not found: ID does not exist" Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.075789 4742 scope.go:117] "RemoveContainer" 
containerID="5ba64379699007687016832dc90360eadf8c303bc650920d6d1646ec9bc33070" Dec 05 06:50:46 crc kubenswrapper[4742]: E1205 06:50:46.076289 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ba64379699007687016832dc90360eadf8c303bc650920d6d1646ec9bc33070\": container with ID starting with 5ba64379699007687016832dc90360eadf8c303bc650920d6d1646ec9bc33070 not found: ID does not exist" containerID="5ba64379699007687016832dc90360eadf8c303bc650920d6d1646ec9bc33070" Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.076422 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ba64379699007687016832dc90360eadf8c303bc650920d6d1646ec9bc33070"} err="failed to get container status \"5ba64379699007687016832dc90360eadf8c303bc650920d6d1646ec9bc33070\": rpc error: code = NotFound desc = could not find container \"5ba64379699007687016832dc90360eadf8c303bc650920d6d1646ec9bc33070\": container with ID starting with 5ba64379699007687016832dc90360eadf8c303bc650920d6d1646ec9bc33070 not found: ID does not exist" Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.398681 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1180ee25-a801-446c-98db-117861eddd21" path="/var/lib/kubelet/pods/1180ee25-a801-446c-98db-117861eddd21/volumes" Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.671216 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.671365 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.671448 4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.672741 4742 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"85acd9b237e1245fb3a5d2729d4cd27ec43fd8523e82bf765425c487d144b99d"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.672849 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://85acd9b237e1245fb3a5d2729d4cd27ec43fd8523e82bf765425c487d144b99d" gracePeriod=600 Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.959543 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="85acd9b237e1245fb3a5d2729d4cd27ec43fd8523e82bf765425c487d144b99d" exitCode=0 Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.959599 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"85acd9b237e1245fb3a5d2729d4cd27ec43fd8523e82bf765425c487d144b99d"} Dec 05 06:50:46 crc kubenswrapper[4742]: I1205 06:50:46.959681 4742 scope.go:117] "RemoveContainer" containerID="a59a77537625ce4110ff82ab10c7438e7309872fd141eaad289cdc74636f3479" Dec 05 06:50:47 crc kubenswrapper[4742]: I1205 06:50:47.975944 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e"} Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.096157 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-chgcx"] Dec 05 06:51:22 crc kubenswrapper[4742]: E1205 06:51:22.097348 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1180ee25-a801-446c-98db-117861eddd21" containerName="extract-content" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.097382 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1180ee25-a801-446c-98db-117861eddd21" containerName="extract-content" Dec 05 06:51:22 crc kubenswrapper[4742]: E1205 06:51:22.097424 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1180ee25-a801-446c-98db-117861eddd21" containerName="extract-utilities" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.097444 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1180ee25-a801-446c-98db-117861eddd21" containerName="extract-utilities" Dec 05 06:51:22 crc kubenswrapper[4742]: E1205 06:51:22.097497 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1180ee25-a801-446c-98db-117861eddd21" containerName="registry-server" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.097511 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1180ee25-a801-446c-98db-117861eddd21" containerName="registry-server" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.097807 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1180ee25-a801-446c-98db-117861eddd21" containerName="registry-server" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.101213 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.102291 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-chgcx"] Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.211827 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-utilities\") pod \"redhat-operators-chgcx\" (UID: \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\") " pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.212133 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-catalog-content\") pod \"redhat-operators-chgcx\" (UID: \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\") " pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.212250 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrwwc\" (UniqueName: \"kubernetes.io/projected/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-kube-api-access-qrwwc\") pod \"redhat-operators-chgcx\" (UID: \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\") " pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.314290 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-catalog-content\") pod \"redhat-operators-chgcx\" (UID: \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\") " pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.314353 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrwwc\" (UniqueName: \"kubernetes.io/projected/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-kube-api-access-qrwwc\") pod \"redhat-operators-chgcx\" (UID: \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\") " pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.314436 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-utilities\") pod \"redhat-operators-chgcx\" (UID: \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\") " pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.315024 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-utilities\") pod \"redhat-operators-chgcx\" (UID: \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\") " pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.315040 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-catalog-content\") pod \"redhat-operators-chgcx\" (UID: \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\") " pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.333413 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-qrwwc\" (UniqueName: \"kubernetes.io/projected/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-kube-api-access-qrwwc\") pod \"redhat-operators-chgcx\" (UID: \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\") " pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.422434 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:22 crc kubenswrapper[4742]: I1205 06:51:22.649555 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-chgcx"] Dec 05 06:51:22 crc kubenswrapper[4742]: W1205 06:51:22.658857 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod22ac9d72_872f_4adb_ab8b_3b7d6a154d3f.slice/crio-ff730dc1948d30e6a4fc49e31d26ef25d263098942b18577c4c4ebaff6e562c9 WatchSource:0}: Error finding container ff730dc1948d30e6a4fc49e31d26ef25d263098942b18577c4c4ebaff6e562c9: Status 404 returned error can't find the container with id ff730dc1948d30e6a4fc49e31d26ef25d263098942b18577c4c4ebaff6e562c9 Dec 05 06:51:23 crc kubenswrapper[4742]: I1205 06:51:23.314818 4742 generic.go:334] "Generic (PLEG): container finished" podID="22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" containerID="f29b4b9f63f31ceca1864ff89e50b764a2c575eff2dbb169e68fda172379c436" exitCode=0 Dec 05 06:51:23 crc kubenswrapper[4742]: I1205 06:51:23.314924 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chgcx" event={"ID":"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f","Type":"ContainerDied","Data":"f29b4b9f63f31ceca1864ff89e50b764a2c575eff2dbb169e68fda172379c436"} Dec 05 06:51:23 crc kubenswrapper[4742]: I1205 06:51:23.315079 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chgcx" event={"ID":"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f","Type":"ContainerStarted","Data":"ff730dc1948d30e6a4fc49e31d26ef25d263098942b18577c4c4ebaff6e562c9"} Dec 05 06:51:24 crc kubenswrapper[4742]: I1205 06:51:24.329169 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chgcx" event={"ID":"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f","Type":"ContainerStarted","Data":"e257515332ba4dee3f79f865e6f6e338a875d020d6e58e991991121032ca78e6"} Dec 05 06:51:25 crc kubenswrapper[4742]: I1205 06:51:25.342821 4742 generic.go:334] "Generic (PLEG): container finished" podID="22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" containerID="e257515332ba4dee3f79f865e6f6e338a875d020d6e58e991991121032ca78e6" exitCode=0 Dec 05 06:51:25 crc kubenswrapper[4742]: I1205 06:51:25.342891 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chgcx" event={"ID":"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f","Type":"ContainerDied","Data":"e257515332ba4dee3f79f865e6f6e338a875d020d6e58e991991121032ca78e6"} Dec 05 06:51:26 crc kubenswrapper[4742]: I1205 06:51:26.352206 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chgcx" event={"ID":"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f","Type":"ContainerStarted","Data":"a9506b81bb230dcea33568333b28b83729ed919094e24e01929b098f7df3d7fa"} Dec 05 06:51:32 crc kubenswrapper[4742]: I1205 06:51:32.422608 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:32 crc kubenswrapper[4742]: I1205 06:51:32.423353 4742 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:32 crc kubenswrapper[4742]: I1205 06:51:32.500676 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:32 crc kubenswrapper[4742]: I1205 06:51:32.529866 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-chgcx" podStartSLOduration=8.001273828 podStartE2EDuration="10.529525878s" podCreationTimestamp="2025-12-05 06:51:22 +0000 UTC" firstStartedPulling="2025-12-05 06:51:23.318096974 +0000 UTC m=+3559.230232036" lastFinishedPulling="2025-12-05 06:51:25.846349034 +0000 UTC m=+3561.758484086" observedRunningTime="2025-12-05 06:51:26.374758958 +0000 UTC m=+3562.286894040" watchObservedRunningTime="2025-12-05 06:51:32.529525878 +0000 UTC m=+3568.441660980" Dec 05 06:51:33 crc kubenswrapper[4742]: I1205 06:51:33.475623 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:33 crc kubenswrapper[4742]: I1205 06:51:33.545044 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-chgcx"] Dec 05 06:51:35 crc kubenswrapper[4742]: I1205 06:51:35.429412 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-chgcx" podUID="22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" containerName="registry-server" containerID="cri-o://a9506b81bb230dcea33568333b28b83729ed919094e24e01929b098f7df3d7fa" gracePeriod=2 Dec 05 06:51:37 crc kubenswrapper[4742]: I1205 06:51:37.460954 4742 generic.go:334] "Generic (PLEG): container finished" podID="22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" containerID="a9506b81bb230dcea33568333b28b83729ed919094e24e01929b098f7df3d7fa" exitCode=0 Dec 05 06:51:37 crc kubenswrapper[4742]: I1205 06:51:37.461523 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chgcx" event={"ID":"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f","Type":"ContainerDied","Data":"a9506b81bb230dcea33568333b28b83729ed919094e24e01929b098f7df3d7fa"} Dec 05 06:51:37 crc kubenswrapper[4742]: I1205 06:51:37.666710 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:37 crc kubenswrapper[4742]: I1205 06:51:37.766156 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrwwc\" (UniqueName: \"kubernetes.io/projected/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-kube-api-access-qrwwc\") pod \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\" (UID: \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\") " Dec 05 06:51:37 crc kubenswrapper[4742]: I1205 06:51:37.766238 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-catalog-content\") pod \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\" (UID: \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\") " Dec 05 06:51:37 crc kubenswrapper[4742]: I1205 06:51:37.766366 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-utilities\") pod \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\" (UID: \"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f\") " Dec 05 06:51:37 crc kubenswrapper[4742]: I1205 06:51:37.767666 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-utilities" (OuterVolumeSpecName: "utilities") pod "22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" (UID: "22ac9d72-872f-4adb-ab8b-3b7d6a154d3f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:51:37 crc kubenswrapper[4742]: I1205 06:51:37.774359 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-kube-api-access-qrwwc" (OuterVolumeSpecName: "kube-api-access-qrwwc") pod "22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" (UID: "22ac9d72-872f-4adb-ab8b-3b7d6a154d3f"). InnerVolumeSpecName "kube-api-access-qrwwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:51:37 crc kubenswrapper[4742]: I1205 06:51:37.867576 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:37 crc kubenswrapper[4742]: I1205 06:51:37.867609 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrwwc\" (UniqueName: \"kubernetes.io/projected/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-kube-api-access-qrwwc\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:37 crc kubenswrapper[4742]: I1205 06:51:37.888142 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" (UID: "22ac9d72-872f-4adb-ab8b-3b7d6a154d3f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:51:37 crc kubenswrapper[4742]: I1205 06:51:37.968763 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:51:38 crc kubenswrapper[4742]: I1205 06:51:38.472646 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-chgcx" event={"ID":"22ac9d72-872f-4adb-ab8b-3b7d6a154d3f","Type":"ContainerDied","Data":"ff730dc1948d30e6a4fc49e31d26ef25d263098942b18577c4c4ebaff6e562c9"} Dec 05 06:51:38 crc kubenswrapper[4742]: I1205 06:51:38.472716 4742 scope.go:117] "RemoveContainer" containerID="a9506b81bb230dcea33568333b28b83729ed919094e24e01929b098f7df3d7fa" Dec 05 06:51:38 crc kubenswrapper[4742]: I1205 06:51:38.472734 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-chgcx" Dec 05 06:51:38 crc kubenswrapper[4742]: I1205 06:51:38.498397 4742 scope.go:117] "RemoveContainer" containerID="e257515332ba4dee3f79f865e6f6e338a875d020d6e58e991991121032ca78e6" Dec 05 06:51:38 crc kubenswrapper[4742]: I1205 06:51:38.526166 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-chgcx"] Dec 05 06:51:38 crc kubenswrapper[4742]: I1205 06:51:38.534919 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-chgcx"] Dec 05 06:51:38 crc kubenswrapper[4742]: I1205 06:51:38.537420 4742 scope.go:117] "RemoveContainer" containerID="f29b4b9f63f31ceca1864ff89e50b764a2c575eff2dbb169e68fda172379c436" Dec 05 06:51:40 crc kubenswrapper[4742]: I1205 06:51:40.395909 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" path="/var/lib/kubelet/pods/22ac9d72-872f-4adb-ab8b-3b7d6a154d3f/volumes" Dec 05 06:53:16 crc kubenswrapper[4742]: I1205 06:53:16.671176 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:53:16 crc kubenswrapper[4742]: I1205 06:53:16.671900 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:53:46 crc kubenswrapper[4742]: I1205 06:53:46.671023 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:53:46 crc kubenswrapper[4742]: I1205 06:53:46.671858 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:54:16 crc kubenswrapper[4742]: I1205 06:54:16.671227 4742 patch_prober.go:28] 
interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 06:54:16 crc kubenswrapper[4742]: I1205 06:54:16.671997 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 06:54:16 crc kubenswrapper[4742]: I1205 06:54:16.672111 4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 06:54:16 crc kubenswrapper[4742]: I1205 06:54:16.673042 4742 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 06:54:16 crc kubenswrapper[4742]: I1205 06:54:16.673243 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" gracePeriod=600 Dec 05 06:54:16 crc kubenswrapper[4742]: E1205 06:54:16.812672 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:54:16 crc kubenswrapper[4742]: I1205 06:54:16.982150 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" exitCode=0 Dec 05 06:54:16 crc kubenswrapper[4742]: I1205 06:54:16.982196 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e"} Dec 05 06:54:16 crc kubenswrapper[4742]: I1205 06:54:16.982228 4742 scope.go:117] "RemoveContainer" containerID="85acd9b237e1245fb3a5d2729d4cd27ec43fd8523e82bf765425c487d144b99d" Dec 05 06:54:16 crc kubenswrapper[4742]: I1205 06:54:16.983961 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:54:16 crc kubenswrapper[4742]: E1205 06:54:16.984277 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:54:32 crc kubenswrapper[4742]: I1205 06:54:32.383950 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:54:32 crc kubenswrapper[4742]: E1205 06:54:32.385124 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:54:47 crc kubenswrapper[4742]: I1205 06:54:47.383113 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:54:47 crc kubenswrapper[4742]: E1205 06:54:47.383949 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.636973 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rzd9p"] Dec 05 06:54:57 crc kubenswrapper[4742]: E1205 06:54:57.638172 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" containerName="extract-utilities" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.638199 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" containerName="extract-utilities" Dec 05 06:54:57 crc kubenswrapper[4742]: E1205 06:54:57.638256 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" containerName="registry-server" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.638270 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" containerName="registry-server" Dec 05 06:54:57 crc kubenswrapper[4742]: E1205 06:54:57.638290 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" containerName="extract-content" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.638303 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" containerName="extract-content" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.638583 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="22ac9d72-872f-4adb-ab8b-3b7d6a154d3f" containerName="registry-server" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.640568 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.643546 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rzd9p"] Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.757669 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9565c\" (UniqueName: \"kubernetes.io/projected/1a9a353c-0ad6-46da-9adb-a1c282b32044-kube-api-access-9565c\") pod \"certified-operators-rzd9p\" (UID: \"1a9a353c-0ad6-46da-9adb-a1c282b32044\") " pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.757732 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a9a353c-0ad6-46da-9adb-a1c282b32044-utilities\") pod \"certified-operators-rzd9p\" (UID: \"1a9a353c-0ad6-46da-9adb-a1c282b32044\") " pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.757837 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a9a353c-0ad6-46da-9adb-a1c282b32044-catalog-content\") pod \"certified-operators-rzd9p\" (UID: \"1a9a353c-0ad6-46da-9adb-a1c282b32044\") " pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.858783 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9565c\" (UniqueName: \"kubernetes.io/projected/1a9a353c-0ad6-46da-9adb-a1c282b32044-kube-api-access-9565c\") pod \"certified-operators-rzd9p\" (UID: \"1a9a353c-0ad6-46da-9adb-a1c282b32044\") " pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.858878 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a9a353c-0ad6-46da-9adb-a1c282b32044-utilities\") pod \"certified-operators-rzd9p\" (UID: \"1a9a353c-0ad6-46da-9adb-a1c282b32044\") " pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.858981 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a9a353c-0ad6-46da-9adb-a1c282b32044-catalog-content\") pod \"certified-operators-rzd9p\" (UID: \"1a9a353c-0ad6-46da-9adb-a1c282b32044\") " pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.859628 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a9a353c-0ad6-46da-9adb-a1c282b32044-catalog-content\") pod \"certified-operators-rzd9p\" (UID: \"1a9a353c-0ad6-46da-9adb-a1c282b32044\") " pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.859637 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a9a353c-0ad6-46da-9adb-a1c282b32044-utilities\") pod \"certified-operators-rzd9p\" (UID: \"1a9a353c-0ad6-46da-9adb-a1c282b32044\") " pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.896305 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9565c\" (UniqueName: \"kubernetes.io/projected/1a9a353c-0ad6-46da-9adb-a1c282b32044-kube-api-access-9565c\") pod \"certified-operators-rzd9p\" (UID: \"1a9a353c-0ad6-46da-9adb-a1c282b32044\") " pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:54:57 crc kubenswrapper[4742]: I1205 06:54:57.983117 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:54:58 crc kubenswrapper[4742]: I1205 06:54:58.433224 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rzd9p"] Dec 05 06:54:59 crc kubenswrapper[4742]: I1205 06:54:59.378212 4742 generic.go:334] "Generic (PLEG): container finished" podID="1a9a353c-0ad6-46da-9adb-a1c282b32044" containerID="938b3806ea32abae587fd6c74828043f871dac7687d729ac3c6f1e2cb6563c9b" exitCode=0 Dec 05 06:54:59 crc kubenswrapper[4742]: I1205 06:54:59.378287 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rzd9p" event={"ID":"1a9a353c-0ad6-46da-9adb-a1c282b32044","Type":"ContainerDied","Data":"938b3806ea32abae587fd6c74828043f871dac7687d729ac3c6f1e2cb6563c9b"} Dec 05 06:54:59 crc kubenswrapper[4742]: I1205 06:54:59.378645 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rzd9p" event={"ID":"1a9a353c-0ad6-46da-9adb-a1c282b32044","Type":"ContainerStarted","Data":"903d536539f2edb19056994e0d2a82b3772bb91e0461b37ecaea562e0d002101"} Dec 05 06:55:00 crc kubenswrapper[4742]: I1205 06:55:00.399687 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rzd9p" event={"ID":"1a9a353c-0ad6-46da-9adb-a1c282b32044","Type":"ContainerStarted","Data":"cbb26caeb18ef452b797781a19c6354cb9528c94e652bb322c30d4ec28c8de79"} Dec 05 06:55:01 crc kubenswrapper[4742]: I1205 06:55:01.384098 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:55:01 crc kubenswrapper[4742]: E1205 06:55:01.384761 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:55:01 crc kubenswrapper[4742]: I1205 06:55:01.415724 4742 generic.go:334] "Generic (PLEG): container finished" podID="1a9a353c-0ad6-46da-9adb-a1c282b32044" containerID="cbb26caeb18ef452b797781a19c6354cb9528c94e652bb322c30d4ec28c8de79" exitCode=0 Dec 05 06:55:01 crc kubenswrapper[4742]: I1205 06:55:01.415806 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rzd9p" event={"ID":"1a9a353c-0ad6-46da-9adb-a1c282b32044","Type":"ContainerDied","Data":"cbb26caeb18ef452b797781a19c6354cb9528c94e652bb322c30d4ec28c8de79"} Dec 05 06:55:02 crc kubenswrapper[4742]: I1205 06:55:02.427345 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rzd9p" event={"ID":"1a9a353c-0ad6-46da-9adb-a1c282b32044","Type":"ContainerStarted","Data":"19fc91d30bbc9faac3a7e87e828acede3933226ffaf2c69bfbe9ec3a6cae4252"} Dec 05 06:55:02 crc kubenswrapper[4742]: I1205 06:55:02.465322 4742 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rzd9p" podStartSLOduration=3.010462111 podStartE2EDuration="5.465296698s" podCreationTimestamp="2025-12-05 06:54:57 +0000 UTC" firstStartedPulling="2025-12-05 06:54:59.381200212 +0000 UTC m=+3775.293335284" lastFinishedPulling="2025-12-05 06:55:01.836034769 +0000 UTC m=+3777.748169871" observedRunningTime="2025-12-05 06:55:02.460661606 +0000 UTC m=+3778.372796708" watchObservedRunningTime="2025-12-05 06:55:02.465296698 +0000 UTC m=+3778.377431790" Dec 05 06:55:07 crc kubenswrapper[4742]: I1205 06:55:07.983557 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:55:07 crc kubenswrapper[4742]: I1205 06:55:07.984269 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:55:08 crc kubenswrapper[4742]: I1205 06:55:08.050930 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:55:08 crc kubenswrapper[4742]: I1205 06:55:08.566566 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:55:08 crc kubenswrapper[4742]: I1205 06:55:08.643019 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rzd9p"] Dec 05 06:55:10 crc kubenswrapper[4742]: I1205 06:55:10.508175 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rzd9p" podUID="1a9a353c-0ad6-46da-9adb-a1c282b32044" containerName="registry-server" containerID="cri-o://19fc91d30bbc9faac3a7e87e828acede3933226ffaf2c69bfbe9ec3a6cae4252" gracePeriod=2 Dec 05 06:55:11 crc kubenswrapper[4742]: I1205 06:55:11.521864 4742 generic.go:334] "Generic (PLEG): container finished" podID="1a9a353c-0ad6-46da-9adb-a1c282b32044" containerID="19fc91d30bbc9faac3a7e87e828acede3933226ffaf2c69bfbe9ec3a6cae4252" exitCode=0 Dec 05 06:55:11 crc kubenswrapper[4742]: I1205 06:55:11.522031 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rzd9p" event={"ID":"1a9a353c-0ad6-46da-9adb-a1c282b32044","Type":"ContainerDied","Data":"19fc91d30bbc9faac3a7e87e828acede3933226ffaf2c69bfbe9ec3a6cae4252"} Dec 05 06:55:11 crc kubenswrapper[4742]: I1205 06:55:11.522324 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rzd9p" event={"ID":"1a9a353c-0ad6-46da-9adb-a1c282b32044","Type":"ContainerDied","Data":"903d536539f2edb19056994e0d2a82b3772bb91e0461b37ecaea562e0d002101"} Dec 05 06:55:11 crc kubenswrapper[4742]: I1205 06:55:11.522348 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="903d536539f2edb19056994e0d2a82b3772bb91e0461b37ecaea562e0d002101" Dec 05 06:55:11 crc kubenswrapper[4742]: I1205 06:55:11.563437 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:55:11 crc kubenswrapper[4742]: I1205 06:55:11.592808 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a9a353c-0ad6-46da-9adb-a1c282b32044-utilities\") pod \"1a9a353c-0ad6-46da-9adb-a1c282b32044\" (UID: \"1a9a353c-0ad6-46da-9adb-a1c282b32044\") " Dec 05 06:55:11 crc kubenswrapper[4742]: I1205 06:55:11.593171 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a9a353c-0ad6-46da-9adb-a1c282b32044-catalog-content\") pod \"1a9a353c-0ad6-46da-9adb-a1c282b32044\" (UID: \"1a9a353c-0ad6-46da-9adb-a1c282b32044\") " Dec 05 06:55:11 crc kubenswrapper[4742]: I1205 06:55:11.593334 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9565c\" (UniqueName: \"kubernetes.io/projected/1a9a353c-0ad6-46da-9adb-a1c282b32044-kube-api-access-9565c\") pod \"1a9a353c-0ad6-46da-9adb-a1c282b32044\" (UID: \"1a9a353c-0ad6-46da-9adb-a1c282b32044\") " Dec 05 06:55:11 crc kubenswrapper[4742]: I1205 06:55:11.595857 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a9a353c-0ad6-46da-9adb-a1c282b32044-utilities" (OuterVolumeSpecName: "utilities") pod "1a9a353c-0ad6-46da-9adb-a1c282b32044" (UID: "1a9a353c-0ad6-46da-9adb-a1c282b32044"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:55:11 crc kubenswrapper[4742]: I1205 06:55:11.608017 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a9a353c-0ad6-46da-9adb-a1c282b32044-kube-api-access-9565c" (OuterVolumeSpecName: "kube-api-access-9565c") pod "1a9a353c-0ad6-46da-9adb-a1c282b32044" (UID: "1a9a353c-0ad6-46da-9adb-a1c282b32044"). InnerVolumeSpecName "kube-api-access-9565c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 06:55:11 crc kubenswrapper[4742]: I1205 06:55:11.670886 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a9a353c-0ad6-46da-9adb-a1c282b32044-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1a9a353c-0ad6-46da-9adb-a1c282b32044" (UID: "1a9a353c-0ad6-46da-9adb-a1c282b32044"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 06:55:11 crc kubenswrapper[4742]: I1205 06:55:11.696362 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a9a353c-0ad6-46da-9adb-a1c282b32044-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 06:55:11 crc kubenswrapper[4742]: I1205 06:55:11.696420 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9565c\" (UniqueName: \"kubernetes.io/projected/1a9a353c-0ad6-46da-9adb-a1c282b32044-kube-api-access-9565c\") on node \"crc\" DevicePath \"\"" Dec 05 06:55:11 crc kubenswrapper[4742]: I1205 06:55:11.696442 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a9a353c-0ad6-46da-9adb-a1c282b32044-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 06:55:12 crc kubenswrapper[4742]: I1205 06:55:12.530866 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rzd9p" Dec 05 06:55:12 crc kubenswrapper[4742]: I1205 06:55:12.559438 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rzd9p"] Dec 05 06:55:12 crc kubenswrapper[4742]: I1205 06:55:12.570590 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rzd9p"] Dec 05 06:55:13 crc kubenswrapper[4742]: I1205 06:55:13.382594 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:55:13 crc kubenswrapper[4742]: E1205 06:55:13.383584 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:55:14 crc kubenswrapper[4742]: I1205 06:55:14.400660 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a9a353c-0ad6-46da-9adb-a1c282b32044" path="/var/lib/kubelet/pods/1a9a353c-0ad6-46da-9adb-a1c282b32044/volumes" Dec 05 06:55:26 crc kubenswrapper[4742]: I1205 06:55:26.383777 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:55:26 crc kubenswrapper[4742]: E1205 06:55:26.384580 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:55:37 crc kubenswrapper[4742]: I1205 06:55:37.382834 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:55:37 crc kubenswrapper[4742]: E1205 06:55:37.383926 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:55:49 crc kubenswrapper[4742]: I1205 06:55:49.383526 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:55:49 crc kubenswrapper[4742]: E1205 06:55:49.384443 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:56:04 crc kubenswrapper[4742]: I1205 06:56:04.404623 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 
06:56:04 crc kubenswrapper[4742]: E1205 06:56:04.406554 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:56:19 crc kubenswrapper[4742]: I1205 06:56:19.383831 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:56:19 crc kubenswrapper[4742]: E1205 06:56:19.384648 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:56:30 crc kubenswrapper[4742]: I1205 06:56:30.382825 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:56:30 crc kubenswrapper[4742]: E1205 06:56:30.383989 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:56:43 crc kubenswrapper[4742]: I1205 06:56:43.382822 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:56:43 crc kubenswrapper[4742]: E1205 06:56:43.383644 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:56:54 crc kubenswrapper[4742]: I1205 06:56:54.390464 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:56:54 crc kubenswrapper[4742]: E1205 06:56:54.391428 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:57:09 crc kubenswrapper[4742]: I1205 06:57:09.382961 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:57:09 crc kubenswrapper[4742]: E1205 06:57:09.384155 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:57:21 crc kubenswrapper[4742]: I1205 06:57:21.383316 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:57:21 crc kubenswrapper[4742]: E1205 06:57:21.384279 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:57:33 crc kubenswrapper[4742]: I1205 06:57:33.382729 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:57:33 crc kubenswrapper[4742]: E1205 06:57:33.383839 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:57:47 crc kubenswrapper[4742]: I1205 06:57:47.383260 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:57:47 crc kubenswrapper[4742]: E1205 06:57:47.384079 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:57:59 crc kubenswrapper[4742]: I1205 06:57:59.382692 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:57:59 crc kubenswrapper[4742]: E1205 06:57:59.383456 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:58:10 crc kubenswrapper[4742]: I1205 06:58:10.383673 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:58:10 crc kubenswrapper[4742]: E1205 06:58:10.384599 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:58:24 crc kubenswrapper[4742]: I1205 06:58:24.391200 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:58:24 crc kubenswrapper[4742]: E1205 06:58:24.392835 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:58:36 crc kubenswrapper[4742]: I1205 06:58:36.383048 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:58:36 crc kubenswrapper[4742]: E1205 06:58:36.384006 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:58:50 crc kubenswrapper[4742]: I1205 06:58:50.382767 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:58:50 crc kubenswrapper[4742]: E1205 06:58:50.383708 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:59:04 crc kubenswrapper[4742]: I1205 06:59:04.391347 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:59:04 crc kubenswrapper[4742]: E1205 06:59:04.392537 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:59:16 crc kubenswrapper[4742]: I1205 06:59:16.382615 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:59:16 crc kubenswrapper[4742]: E1205 06:59:16.383541 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 06:59:30 crc kubenswrapper[4742]: I1205 06:59:30.383848 4742 
scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 06:59:30 crc kubenswrapper[4742]: I1205 06:59:30.928662 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"cb9a27d77770ffff4e58c7fb9055f8e2b0e937d58b32eb42a5731a840005f97c"} Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.200140 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5"] Dec 05 07:00:00 crc kubenswrapper[4742]: E1205 07:00:00.201037 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a9a353c-0ad6-46da-9adb-a1c282b32044" containerName="registry-server" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.201074 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a9a353c-0ad6-46da-9adb-a1c282b32044" containerName="registry-server" Dec 05 07:00:00 crc kubenswrapper[4742]: E1205 07:00:00.201090 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a9a353c-0ad6-46da-9adb-a1c282b32044" containerName="extract-content" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.201099 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a9a353c-0ad6-46da-9adb-a1c282b32044" containerName="extract-content" Dec 05 07:00:00 crc kubenswrapper[4742]: E1205 07:00:00.201118 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a9a353c-0ad6-46da-9adb-a1c282b32044" containerName="extract-utilities" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.201128 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a9a353c-0ad6-46da-9adb-a1c282b32044" containerName="extract-utilities" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.201318 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a9a353c-0ad6-46da-9adb-a1c282b32044" containerName="registry-server" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.201885 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.205252 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.205465 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.207415 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5"] Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.294132 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/171b42e5-2c6f-4846-898d-02b1411894a5-secret-volume\") pod \"collect-profiles-29415300-glrv5\" (UID: \"171b42e5-2c6f-4846-898d-02b1411894a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.294313 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/171b42e5-2c6f-4846-898d-02b1411894a5-config-volume\") pod \"collect-profiles-29415300-glrv5\" (UID: \"171b42e5-2c6f-4846-898d-02b1411894a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.294454 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7kzc\" (UniqueName: \"kubernetes.io/projected/171b42e5-2c6f-4846-898d-02b1411894a5-kube-api-access-h7kzc\") pod \"collect-profiles-29415300-glrv5\" (UID: \"171b42e5-2c6f-4846-898d-02b1411894a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.396190 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/171b42e5-2c6f-4846-898d-02b1411894a5-secret-volume\") pod \"collect-profiles-29415300-glrv5\" (UID: \"171b42e5-2c6f-4846-898d-02b1411894a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.396252 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/171b42e5-2c6f-4846-898d-02b1411894a5-config-volume\") pod \"collect-profiles-29415300-glrv5\" (UID: \"171b42e5-2c6f-4846-898d-02b1411894a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.396271 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7kzc\" (UniqueName: \"kubernetes.io/projected/171b42e5-2c6f-4846-898d-02b1411894a5-kube-api-access-h7kzc\") pod \"collect-profiles-29415300-glrv5\" (UID: \"171b42e5-2c6f-4846-898d-02b1411894a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.397386 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/171b42e5-2c6f-4846-898d-02b1411894a5-config-volume\") pod 
\"collect-profiles-29415300-glrv5\" (UID: \"171b42e5-2c6f-4846-898d-02b1411894a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.408789 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/171b42e5-2c6f-4846-898d-02b1411894a5-secret-volume\") pod \"collect-profiles-29415300-glrv5\" (UID: \"171b42e5-2c6f-4846-898d-02b1411894a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.416008 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7kzc\" (UniqueName: \"kubernetes.io/projected/171b42e5-2c6f-4846-898d-02b1411894a5-kube-api-access-h7kzc\") pod \"collect-profiles-29415300-glrv5\" (UID: \"171b42e5-2c6f-4846-898d-02b1411894a5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" Dec 05 07:00:00 crc kubenswrapper[4742]: I1205 07:00:00.528097 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" Dec 05 07:00:01 crc kubenswrapper[4742]: I1205 07:00:01.029371 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5"] Dec 05 07:00:01 crc kubenswrapper[4742]: W1205 07:00:01.036369 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod171b42e5_2c6f_4846_898d_02b1411894a5.slice/crio-b1825d6e3892a5a1b05117b45953f617cdf5c968aaf9ba7251969df253f9bb44 WatchSource:0}: Error finding container b1825d6e3892a5a1b05117b45953f617cdf5c968aaf9ba7251969df253f9bb44: Status 404 returned error can't find the container with id b1825d6e3892a5a1b05117b45953f617cdf5c968aaf9ba7251969df253f9bb44 Dec 05 07:00:01 crc kubenswrapper[4742]: I1205 07:00:01.221008 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" event={"ID":"171b42e5-2c6f-4846-898d-02b1411894a5","Type":"ContainerStarted","Data":"b1825d6e3892a5a1b05117b45953f617cdf5c968aaf9ba7251969df253f9bb44"} Dec 05 07:00:01 crc kubenswrapper[4742]: I1205 07:00:01.815496 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hngqx"] Dec 05 07:00:01 crc kubenswrapper[4742]: I1205 07:00:01.819450 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:01 crc kubenswrapper[4742]: I1205 07:00:01.831464 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hngqx"] Dec 05 07:00:01 crc kubenswrapper[4742]: I1205 07:00:01.921777 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfbhx\" (UniqueName: \"kubernetes.io/projected/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-kube-api-access-wfbhx\") pod \"community-operators-hngqx\" (UID: \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\") " pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:01 crc kubenswrapper[4742]: I1205 07:00:01.921852 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-catalog-content\") pod \"community-operators-hngqx\" (UID: \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\") " pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:01 crc kubenswrapper[4742]: I1205 07:00:01.921910 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-utilities\") pod \"community-operators-hngqx\" (UID: \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\") " pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:02 crc kubenswrapper[4742]: I1205 07:00:02.023701 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfbhx\" (UniqueName: \"kubernetes.io/projected/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-kube-api-access-wfbhx\") pod \"community-operators-hngqx\" (UID: \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\") " pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:02 crc kubenswrapper[4742]: I1205 07:00:02.023797 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-catalog-content\") pod \"community-operators-hngqx\" (UID: \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\") " pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:02 crc kubenswrapper[4742]: I1205 07:00:02.023849 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-utilities\") pod \"community-operators-hngqx\" (UID: \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\") " pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:02 crc kubenswrapper[4742]: I1205 07:00:02.024367 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-utilities\") pod \"community-operators-hngqx\" (UID: \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\") " pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:02 crc kubenswrapper[4742]: I1205 07:00:02.024426 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-catalog-content\") pod \"community-operators-hngqx\" (UID: \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\") " pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:02 crc kubenswrapper[4742]: I1205 07:00:02.045016 4742 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wfbhx\" (UniqueName: \"kubernetes.io/projected/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-kube-api-access-wfbhx\") pod \"community-operators-hngqx\" (UID: \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\") " pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:02 crc kubenswrapper[4742]: I1205 07:00:02.156468 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:02 crc kubenswrapper[4742]: I1205 07:00:02.234950 4742 generic.go:334] "Generic (PLEG): container finished" podID="171b42e5-2c6f-4846-898d-02b1411894a5" containerID="7a54a4a2e49de31b5b9ed88415f5302af53e2864ae7560a19807414c25ed7553" exitCode=0 Dec 05 07:00:02 crc kubenswrapper[4742]: I1205 07:00:02.235016 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" event={"ID":"171b42e5-2c6f-4846-898d-02b1411894a5","Type":"ContainerDied","Data":"7a54a4a2e49de31b5b9ed88415f5302af53e2864ae7560a19807414c25ed7553"} Dec 05 07:00:02 crc kubenswrapper[4742]: I1205 07:00:02.694950 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hngqx"] Dec 05 07:00:03 crc kubenswrapper[4742]: I1205 07:00:03.248404 4742 generic.go:334] "Generic (PLEG): container finished" podID="2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" containerID="28bac1fddad26bbf7619dfde672c85fe4c9062776e9ac1d6daa1fc987b0e4ec7" exitCode=0 Dec 05 07:00:03 crc kubenswrapper[4742]: I1205 07:00:03.248482 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hngqx" event={"ID":"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c","Type":"ContainerDied","Data":"28bac1fddad26bbf7619dfde672c85fe4c9062776e9ac1d6daa1fc987b0e4ec7"} Dec 05 07:00:03 crc kubenswrapper[4742]: I1205 07:00:03.248903 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hngqx" event={"ID":"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c","Type":"ContainerStarted","Data":"b98dedccbb852ff5f09f39df4a159899162323b385e7fd106c2f710737a4d063"} Dec 05 07:00:03 crc kubenswrapper[4742]: I1205 07:00:03.251361 4742 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 07:00:03 crc kubenswrapper[4742]: I1205 07:00:03.629707 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" Dec 05 07:00:03 crc kubenswrapper[4742]: I1205 07:00:03.778449 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7kzc\" (UniqueName: \"kubernetes.io/projected/171b42e5-2c6f-4846-898d-02b1411894a5-kube-api-access-h7kzc\") pod \"171b42e5-2c6f-4846-898d-02b1411894a5\" (UID: \"171b42e5-2c6f-4846-898d-02b1411894a5\") " Dec 05 07:00:03 crc kubenswrapper[4742]: I1205 07:00:03.778675 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/171b42e5-2c6f-4846-898d-02b1411894a5-secret-volume\") pod \"171b42e5-2c6f-4846-898d-02b1411894a5\" (UID: \"171b42e5-2c6f-4846-898d-02b1411894a5\") " Dec 05 07:00:03 crc kubenswrapper[4742]: I1205 07:00:03.778775 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/171b42e5-2c6f-4846-898d-02b1411894a5-config-volume\") pod \"171b42e5-2c6f-4846-898d-02b1411894a5\" (UID: \"171b42e5-2c6f-4846-898d-02b1411894a5\") " Dec 05 07:00:03 crc kubenswrapper[4742]: I1205 07:00:03.780143 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/171b42e5-2c6f-4846-898d-02b1411894a5-config-volume" (OuterVolumeSpecName: "config-volume") pod "171b42e5-2c6f-4846-898d-02b1411894a5" (UID: "171b42e5-2c6f-4846-898d-02b1411894a5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 07:00:03 crc kubenswrapper[4742]: I1205 07:00:03.787630 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/171b42e5-2c6f-4846-898d-02b1411894a5-kube-api-access-h7kzc" (OuterVolumeSpecName: "kube-api-access-h7kzc") pod "171b42e5-2c6f-4846-898d-02b1411894a5" (UID: "171b42e5-2c6f-4846-898d-02b1411894a5"). InnerVolumeSpecName "kube-api-access-h7kzc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:00:03 crc kubenswrapper[4742]: I1205 07:00:03.789035 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/171b42e5-2c6f-4846-898d-02b1411894a5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "171b42e5-2c6f-4846-898d-02b1411894a5" (UID: "171b42e5-2c6f-4846-898d-02b1411894a5"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 07:00:03 crc kubenswrapper[4742]: I1205 07:00:03.880670 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7kzc\" (UniqueName: \"kubernetes.io/projected/171b42e5-2c6f-4846-898d-02b1411894a5-kube-api-access-h7kzc\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:03 crc kubenswrapper[4742]: I1205 07:00:03.881127 4742 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/171b42e5-2c6f-4846-898d-02b1411894a5-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:03 crc kubenswrapper[4742]: I1205 07:00:03.881181 4742 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/171b42e5-2c6f-4846-898d-02b1411894a5-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:04 crc kubenswrapper[4742]: I1205 07:00:04.270285 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" event={"ID":"171b42e5-2c6f-4846-898d-02b1411894a5","Type":"ContainerDied","Data":"b1825d6e3892a5a1b05117b45953f617cdf5c968aaf9ba7251969df253f9bb44"} Dec 05 07:00:04 crc kubenswrapper[4742]: I1205 07:00:04.270824 4742 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1825d6e3892a5a1b05117b45953f617cdf5c968aaf9ba7251969df253f9bb44" Dec 05 07:00:04 crc kubenswrapper[4742]: I1205 07:00:04.270403 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415300-glrv5" Dec 05 07:00:04 crc kubenswrapper[4742]: I1205 07:00:04.733496 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj"] Dec 05 07:00:04 crc kubenswrapper[4742]: I1205 07:00:04.742960 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415255-4sqxj"] Dec 05 07:00:05 crc kubenswrapper[4742]: I1205 07:00:05.283674 4742 generic.go:334] "Generic (PLEG): container finished" podID="2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" containerID="4ef3a3ad182cde92ec6ccb9d0d30ba577f34467660bfadc4c3a96f89f4d2d906" exitCode=0 Dec 05 07:00:05 crc kubenswrapper[4742]: I1205 07:00:05.283744 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hngqx" event={"ID":"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c","Type":"ContainerDied","Data":"4ef3a3ad182cde92ec6ccb9d0d30ba577f34467660bfadc4c3a96f89f4d2d906"} Dec 05 07:00:06 crc kubenswrapper[4742]: I1205 07:00:06.297162 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hngqx" event={"ID":"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c","Type":"ContainerStarted","Data":"18f0e1981ece6e9d944e425faffe2cbedd0e9b5bbaf9eb5dfeeb1d0e7deb63bc"} Dec 05 07:00:06 crc kubenswrapper[4742]: I1205 07:00:06.336020 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hngqx" podStartSLOduration=2.893984644 podStartE2EDuration="5.335990773s" podCreationTimestamp="2025-12-05 07:00:01 +0000 UTC" firstStartedPulling="2025-12-05 07:00:03.250904319 +0000 UTC m=+4079.163039411" lastFinishedPulling="2025-12-05 07:00:05.692910468 +0000 UTC m=+4081.605045540" observedRunningTime="2025-12-05 07:00:06.322694072 +0000 UTC m=+4082.234829164" watchObservedRunningTime="2025-12-05 07:00:06.335990773 +0000 UTC 
m=+4082.248125875" Dec 05 07:00:06 crc kubenswrapper[4742]: I1205 07:00:06.410596 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a6d05c8-96c6-4ee1-b695-753e5af543bc" path="/var/lib/kubelet/pods/1a6d05c8-96c6-4ee1-b695-753e5af543bc/volumes" Dec 05 07:00:12 crc kubenswrapper[4742]: I1205 07:00:12.157237 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:12 crc kubenswrapper[4742]: I1205 07:00:12.158386 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:12 crc kubenswrapper[4742]: I1205 07:00:12.234405 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:12 crc kubenswrapper[4742]: I1205 07:00:12.434177 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:13 crc kubenswrapper[4742]: I1205 07:00:13.520623 4742 scope.go:117] "RemoveContainer" containerID="e60559e697c4bcd742a29188dfc9b8629d47208eb15203406bb86012cbf04b6b" Dec 05 07:00:14 crc kubenswrapper[4742]: I1205 07:00:14.800758 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hngqx"] Dec 05 07:00:14 crc kubenswrapper[4742]: I1205 07:00:14.802762 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hngqx" podUID="2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" containerName="registry-server" containerID="cri-o://18f0e1981ece6e9d944e425faffe2cbedd0e9b5bbaf9eb5dfeeb1d0e7deb63bc" gracePeriod=2 Dec 05 07:00:15 crc kubenswrapper[4742]: I1205 07:00:15.390864 4742 generic.go:334] "Generic (PLEG): container finished" podID="2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" containerID="18f0e1981ece6e9d944e425faffe2cbedd0e9b5bbaf9eb5dfeeb1d0e7deb63bc" exitCode=0 Dec 05 07:00:15 crc kubenswrapper[4742]: I1205 07:00:15.390930 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hngqx" event={"ID":"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c","Type":"ContainerDied","Data":"18f0e1981ece6e9d944e425faffe2cbedd0e9b5bbaf9eb5dfeeb1d0e7deb63bc"} Dec 05 07:00:15 crc kubenswrapper[4742]: I1205 07:00:15.789510 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:15 crc kubenswrapper[4742]: I1205 07:00:15.885827 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-catalog-content\") pod \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\" (UID: \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\") " Dec 05 07:00:15 crc kubenswrapper[4742]: I1205 07:00:15.886023 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-utilities\") pod \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\" (UID: \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\") " Dec 05 07:00:15 crc kubenswrapper[4742]: I1205 07:00:15.886241 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfbhx\" (UniqueName: \"kubernetes.io/projected/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-kube-api-access-wfbhx\") pod \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\" (UID: \"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c\") " Dec 05 07:00:15 crc kubenswrapper[4742]: I1205 07:00:15.888272 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-utilities" (OuterVolumeSpecName: "utilities") pod "2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" (UID: "2fc4dcc0-46a2-47d9-8d53-ee7457284c3c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:00:15 crc kubenswrapper[4742]: I1205 07:00:15.895320 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-kube-api-access-wfbhx" (OuterVolumeSpecName: "kube-api-access-wfbhx") pod "2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" (UID: "2fc4dcc0-46a2-47d9-8d53-ee7457284c3c"). InnerVolumeSpecName "kube-api-access-wfbhx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:00:15 crc kubenswrapper[4742]: I1205 07:00:15.951264 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" (UID: "2fc4dcc0-46a2-47d9-8d53-ee7457284c3c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:00:15 crc kubenswrapper[4742]: I1205 07:00:15.987776 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfbhx\" (UniqueName: \"kubernetes.io/projected/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-kube-api-access-wfbhx\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:15 crc kubenswrapper[4742]: I1205 07:00:15.987819 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:15 crc kubenswrapper[4742]: I1205 07:00:15.987831 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:16 crc kubenswrapper[4742]: I1205 07:00:16.403853 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hngqx" event={"ID":"2fc4dcc0-46a2-47d9-8d53-ee7457284c3c","Type":"ContainerDied","Data":"b98dedccbb852ff5f09f39df4a159899162323b385e7fd106c2f710737a4d063"} Dec 05 07:00:16 crc kubenswrapper[4742]: I1205 07:00:16.403913 4742 scope.go:117] "RemoveContainer" containerID="18f0e1981ece6e9d944e425faffe2cbedd0e9b5bbaf9eb5dfeeb1d0e7deb63bc" Dec 05 07:00:16 crc kubenswrapper[4742]: I1205 07:00:16.404032 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hngqx" Dec 05 07:00:16 crc kubenswrapper[4742]: I1205 07:00:16.431312 4742 scope.go:117] "RemoveContainer" containerID="4ef3a3ad182cde92ec6ccb9d0d30ba577f34467660bfadc4c3a96f89f4d2d906" Dec 05 07:00:16 crc kubenswrapper[4742]: I1205 07:00:16.463387 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hngqx"] Dec 05 07:00:16 crc kubenswrapper[4742]: I1205 07:00:16.464734 4742 scope.go:117] "RemoveContainer" containerID="28bac1fddad26bbf7619dfde672c85fe4c9062776e9ac1d6daa1fc987b0e4ec7" Dec 05 07:00:16 crc kubenswrapper[4742]: I1205 07:00:16.469352 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hngqx"] Dec 05 07:00:18 crc kubenswrapper[4742]: I1205 07:00:18.396221 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" path="/var/lib/kubelet/pods/2fc4dcc0-46a2-47d9-8d53-ee7457284c3c/volumes" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.447775 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8qjtm"] Dec 05 07:00:36 crc kubenswrapper[4742]: E1205 07:00:36.449981 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" containerName="extract-utilities" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.450003 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" containerName="extract-utilities" Dec 05 07:00:36 crc kubenswrapper[4742]: E1205 07:00:36.450028 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" containerName="extract-content" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.450037 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" containerName="extract-content" Dec 05 07:00:36 crc kubenswrapper[4742]: E1205 07:00:36.450078 4742 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" containerName="registry-server" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.450088 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" containerName="registry-server" Dec 05 07:00:36 crc kubenswrapper[4742]: E1205 07:00:36.450112 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="171b42e5-2c6f-4846-898d-02b1411894a5" containerName="collect-profiles" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.450119 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="171b42e5-2c6f-4846-898d-02b1411894a5" containerName="collect-profiles" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.450285 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="171b42e5-2c6f-4846-898d-02b1411894a5" containerName="collect-profiles" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.450304 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fc4dcc0-46a2-47d9-8d53-ee7457284c3c" containerName="registry-server" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.451563 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.469367 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qjtm"] Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.622328 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88777b04-7e1a-40b8-8618-15a1aa11797b-catalog-content\") pod \"redhat-marketplace-8qjtm\" (UID: \"88777b04-7e1a-40b8-8618-15a1aa11797b\") " pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.622389 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgcc8\" (UniqueName: \"kubernetes.io/projected/88777b04-7e1a-40b8-8618-15a1aa11797b-kube-api-access-xgcc8\") pod \"redhat-marketplace-8qjtm\" (UID: \"88777b04-7e1a-40b8-8618-15a1aa11797b\") " pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.622464 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88777b04-7e1a-40b8-8618-15a1aa11797b-utilities\") pod \"redhat-marketplace-8qjtm\" (UID: \"88777b04-7e1a-40b8-8618-15a1aa11797b\") " pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.723778 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88777b04-7e1a-40b8-8618-15a1aa11797b-utilities\") pod \"redhat-marketplace-8qjtm\" (UID: \"88777b04-7e1a-40b8-8618-15a1aa11797b\") " pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.723833 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88777b04-7e1a-40b8-8618-15a1aa11797b-catalog-content\") pod \"redhat-marketplace-8qjtm\" (UID: \"88777b04-7e1a-40b8-8618-15a1aa11797b\") " pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:36 crc 
kubenswrapper[4742]: I1205 07:00:36.723856 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgcc8\" (UniqueName: \"kubernetes.io/projected/88777b04-7e1a-40b8-8618-15a1aa11797b-kube-api-access-xgcc8\") pod \"redhat-marketplace-8qjtm\" (UID: \"88777b04-7e1a-40b8-8618-15a1aa11797b\") " pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.724439 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88777b04-7e1a-40b8-8618-15a1aa11797b-utilities\") pod \"redhat-marketplace-8qjtm\" (UID: \"88777b04-7e1a-40b8-8618-15a1aa11797b\") " pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.724587 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88777b04-7e1a-40b8-8618-15a1aa11797b-catalog-content\") pod \"redhat-marketplace-8qjtm\" (UID: \"88777b04-7e1a-40b8-8618-15a1aa11797b\") " pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.745965 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgcc8\" (UniqueName: \"kubernetes.io/projected/88777b04-7e1a-40b8-8618-15a1aa11797b-kube-api-access-xgcc8\") pod \"redhat-marketplace-8qjtm\" (UID: \"88777b04-7e1a-40b8-8618-15a1aa11797b\") " pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:36 crc kubenswrapper[4742]: I1205 07:00:36.778630 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:37 crc kubenswrapper[4742]: I1205 07:00:37.052388 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qjtm"] Dec 05 07:00:37 crc kubenswrapper[4742]: I1205 07:00:37.588437 4742 generic.go:334] "Generic (PLEG): container finished" podID="88777b04-7e1a-40b8-8618-15a1aa11797b" containerID="24102c9618ca627d413b8039173e2581df4f1c5a528ba3160bb1d0fd6d50a188" exitCode=0 Dec 05 07:00:37 crc kubenswrapper[4742]: I1205 07:00:37.588534 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qjtm" event={"ID":"88777b04-7e1a-40b8-8618-15a1aa11797b","Type":"ContainerDied","Data":"24102c9618ca627d413b8039173e2581df4f1c5a528ba3160bb1d0fd6d50a188"} Dec 05 07:00:37 crc kubenswrapper[4742]: I1205 07:00:37.588797 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qjtm" event={"ID":"88777b04-7e1a-40b8-8618-15a1aa11797b","Type":"ContainerStarted","Data":"8376c19e2eae1a7f91612a8f1e86d164c2e7e7feacc1eb46d9416270cb1e37e1"} Dec 05 07:00:39 crc kubenswrapper[4742]: I1205 07:00:39.607777 4742 generic.go:334] "Generic (PLEG): container finished" podID="88777b04-7e1a-40b8-8618-15a1aa11797b" containerID="013d1e307a48702dfc2a47a11d46f5e80088371946cf59bc377de7e3ab109003" exitCode=0 Dec 05 07:00:39 crc kubenswrapper[4742]: I1205 07:00:39.607899 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qjtm" event={"ID":"88777b04-7e1a-40b8-8618-15a1aa11797b","Type":"ContainerDied","Data":"013d1e307a48702dfc2a47a11d46f5e80088371946cf59bc377de7e3ab109003"} Dec 05 07:00:40 crc kubenswrapper[4742]: I1205 07:00:40.618328 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-8qjtm" event={"ID":"88777b04-7e1a-40b8-8618-15a1aa11797b","Type":"ContainerStarted","Data":"b6595fc4db4ad81311eccb9729e73d3d876500e35225a522894a46cac57f0dde"} Dec 05 07:00:40 crc kubenswrapper[4742]: I1205 07:00:40.643140 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8qjtm" podStartSLOduration=2.103902693 podStartE2EDuration="4.64312844s" podCreationTimestamp="2025-12-05 07:00:36 +0000 UTC" firstStartedPulling="2025-12-05 07:00:37.590352999 +0000 UTC m=+4113.502488061" lastFinishedPulling="2025-12-05 07:00:40.129578746 +0000 UTC m=+4116.041713808" observedRunningTime="2025-12-05 07:00:40.641047216 +0000 UTC m=+4116.553182278" watchObservedRunningTime="2025-12-05 07:00:40.64312844 +0000 UTC m=+4116.555263502" Dec 05 07:00:46 crc kubenswrapper[4742]: I1205 07:00:46.779253 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:46 crc kubenswrapper[4742]: I1205 07:00:46.779913 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:46 crc kubenswrapper[4742]: I1205 07:00:46.857697 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:47 crc kubenswrapper[4742]: I1205 07:00:47.757776 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:47 crc kubenswrapper[4742]: I1205 07:00:47.813309 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qjtm"] Dec 05 07:00:49 crc kubenswrapper[4742]: I1205 07:00:49.704898 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8qjtm" podUID="88777b04-7e1a-40b8-8618-15a1aa11797b" containerName="registry-server" containerID="cri-o://b6595fc4db4ad81311eccb9729e73d3d876500e35225a522894a46cac57f0dde" gracePeriod=2 Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.688393 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.719030 4742 generic.go:334] "Generic (PLEG): container finished" podID="88777b04-7e1a-40b8-8618-15a1aa11797b" containerID="b6595fc4db4ad81311eccb9729e73d3d876500e35225a522894a46cac57f0dde" exitCode=0 Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.719103 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qjtm" event={"ID":"88777b04-7e1a-40b8-8618-15a1aa11797b","Type":"ContainerDied","Data":"b6595fc4db4ad81311eccb9729e73d3d876500e35225a522894a46cac57f0dde"} Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.719137 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8qjtm" event={"ID":"88777b04-7e1a-40b8-8618-15a1aa11797b","Type":"ContainerDied","Data":"8376c19e2eae1a7f91612a8f1e86d164c2e7e7feacc1eb46d9416270cb1e37e1"} Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.719159 4742 scope.go:117] "RemoveContainer" containerID="b6595fc4db4ad81311eccb9729e73d3d876500e35225a522894a46cac57f0dde" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.719298 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8qjtm" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.764483 4742 scope.go:117] "RemoveContainer" containerID="013d1e307a48702dfc2a47a11d46f5e80088371946cf59bc377de7e3ab109003" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.798873 4742 scope.go:117] "RemoveContainer" containerID="24102c9618ca627d413b8039173e2581df4f1c5a528ba3160bb1d0fd6d50a188" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.817684 4742 scope.go:117] "RemoveContainer" containerID="b6595fc4db4ad81311eccb9729e73d3d876500e35225a522894a46cac57f0dde" Dec 05 07:00:50 crc kubenswrapper[4742]: E1205 07:00:50.818164 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6595fc4db4ad81311eccb9729e73d3d876500e35225a522894a46cac57f0dde\": container with ID starting with b6595fc4db4ad81311eccb9729e73d3d876500e35225a522894a46cac57f0dde not found: ID does not exist" containerID="b6595fc4db4ad81311eccb9729e73d3d876500e35225a522894a46cac57f0dde" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.818207 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6595fc4db4ad81311eccb9729e73d3d876500e35225a522894a46cac57f0dde"} err="failed to get container status \"b6595fc4db4ad81311eccb9729e73d3d876500e35225a522894a46cac57f0dde\": rpc error: code = NotFound desc = could not find container \"b6595fc4db4ad81311eccb9729e73d3d876500e35225a522894a46cac57f0dde\": container with ID starting with b6595fc4db4ad81311eccb9729e73d3d876500e35225a522894a46cac57f0dde not found: ID does not exist" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.818262 4742 scope.go:117] "RemoveContainer" containerID="013d1e307a48702dfc2a47a11d46f5e80088371946cf59bc377de7e3ab109003" Dec 05 07:00:50 crc kubenswrapper[4742]: E1205 07:00:50.818556 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"013d1e307a48702dfc2a47a11d46f5e80088371946cf59bc377de7e3ab109003\": container with ID starting with 013d1e307a48702dfc2a47a11d46f5e80088371946cf59bc377de7e3ab109003 not found: ID does not exist" containerID="013d1e307a48702dfc2a47a11d46f5e80088371946cf59bc377de7e3ab109003" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.818595 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"013d1e307a48702dfc2a47a11d46f5e80088371946cf59bc377de7e3ab109003"} err="failed to get container status \"013d1e307a48702dfc2a47a11d46f5e80088371946cf59bc377de7e3ab109003\": rpc error: code = NotFound desc = could not find container \"013d1e307a48702dfc2a47a11d46f5e80088371946cf59bc377de7e3ab109003\": container with ID starting with 013d1e307a48702dfc2a47a11d46f5e80088371946cf59bc377de7e3ab109003 not found: ID does not exist" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.818623 4742 scope.go:117] "RemoveContainer" containerID="24102c9618ca627d413b8039173e2581df4f1c5a528ba3160bb1d0fd6d50a188" Dec 05 07:00:50 crc kubenswrapper[4742]: E1205 07:00:50.818845 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24102c9618ca627d413b8039173e2581df4f1c5a528ba3160bb1d0fd6d50a188\": container with ID starting with 24102c9618ca627d413b8039173e2581df4f1c5a528ba3160bb1d0fd6d50a188 not found: ID does not exist" containerID="24102c9618ca627d413b8039173e2581df4f1c5a528ba3160bb1d0fd6d50a188" 
Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.818879 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24102c9618ca627d413b8039173e2581df4f1c5a528ba3160bb1d0fd6d50a188"} err="failed to get container status \"24102c9618ca627d413b8039173e2581df4f1c5a528ba3160bb1d0fd6d50a188\": rpc error: code = NotFound desc = could not find container \"24102c9618ca627d413b8039173e2581df4f1c5a528ba3160bb1d0fd6d50a188\": container with ID starting with 24102c9618ca627d413b8039173e2581df4f1c5a528ba3160bb1d0fd6d50a188 not found: ID does not exist" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.853511 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88777b04-7e1a-40b8-8618-15a1aa11797b-catalog-content\") pod \"88777b04-7e1a-40b8-8618-15a1aa11797b\" (UID: \"88777b04-7e1a-40b8-8618-15a1aa11797b\") " Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.853583 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgcc8\" (UniqueName: \"kubernetes.io/projected/88777b04-7e1a-40b8-8618-15a1aa11797b-kube-api-access-xgcc8\") pod \"88777b04-7e1a-40b8-8618-15a1aa11797b\" (UID: \"88777b04-7e1a-40b8-8618-15a1aa11797b\") " Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.853611 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88777b04-7e1a-40b8-8618-15a1aa11797b-utilities\") pod \"88777b04-7e1a-40b8-8618-15a1aa11797b\" (UID: \"88777b04-7e1a-40b8-8618-15a1aa11797b\") " Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.854709 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88777b04-7e1a-40b8-8618-15a1aa11797b-utilities" (OuterVolumeSpecName: "utilities") pod "88777b04-7e1a-40b8-8618-15a1aa11797b" (UID: "88777b04-7e1a-40b8-8618-15a1aa11797b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.860196 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88777b04-7e1a-40b8-8618-15a1aa11797b-kube-api-access-xgcc8" (OuterVolumeSpecName: "kube-api-access-xgcc8") pod "88777b04-7e1a-40b8-8618-15a1aa11797b" (UID: "88777b04-7e1a-40b8-8618-15a1aa11797b"). InnerVolumeSpecName "kube-api-access-xgcc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.878614 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88777b04-7e1a-40b8-8618-15a1aa11797b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "88777b04-7e1a-40b8-8618-15a1aa11797b" (UID: "88777b04-7e1a-40b8-8618-15a1aa11797b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.955477 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/88777b04-7e1a-40b8-8618-15a1aa11797b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.955548 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgcc8\" (UniqueName: \"kubernetes.io/projected/88777b04-7e1a-40b8-8618-15a1aa11797b-kube-api-access-xgcc8\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:50 crc kubenswrapper[4742]: I1205 07:00:50.955578 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/88777b04-7e1a-40b8-8618-15a1aa11797b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:00:51 crc kubenswrapper[4742]: I1205 07:00:51.075343 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qjtm"] Dec 05 07:00:51 crc kubenswrapper[4742]: I1205 07:00:51.086183 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8qjtm"] Dec 05 07:00:52 crc kubenswrapper[4742]: I1205 07:00:52.392807 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88777b04-7e1a-40b8-8618-15a1aa11797b" path="/var/lib/kubelet/pods/88777b04-7e1a-40b8-8618-15a1aa11797b/volumes" Dec 05 07:01:13 crc kubenswrapper[4742]: I1205 07:01:13.606179 4742 scope.go:117] "RemoveContainer" containerID="cbb26caeb18ef452b797781a19c6354cb9528c94e652bb322c30d4ec28c8de79" Dec 05 07:01:13 crc kubenswrapper[4742]: I1205 07:01:13.639797 4742 scope.go:117] "RemoveContainer" containerID="19fc91d30bbc9faac3a7e87e828acede3933226ffaf2c69bfbe9ec3a6cae4252" Dec 05 07:01:13 crc kubenswrapper[4742]: I1205 07:01:13.679730 4742 scope.go:117] "RemoveContainer" containerID="938b3806ea32abae587fd6c74828043f871dac7687d729ac3c6f1e2cb6563c9b" Dec 05 07:01:46 crc kubenswrapper[4742]: I1205 07:01:46.671011 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:01:46 crc kubenswrapper[4742]: I1205 07:01:46.671814 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.202650 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ts8tc"] Dec 05 07:02:07 crc kubenswrapper[4742]: E1205 07:02:07.204231 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88777b04-7e1a-40b8-8618-15a1aa11797b" containerName="registry-server" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.204260 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="88777b04-7e1a-40b8-8618-15a1aa11797b" containerName="registry-server" Dec 05 07:02:07 crc kubenswrapper[4742]: E1205 07:02:07.204288 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88777b04-7e1a-40b8-8618-15a1aa11797b" containerName="extract-utilities" Dec 05 07:02:07 crc 
kubenswrapper[4742]: I1205 07:02:07.204301 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="88777b04-7e1a-40b8-8618-15a1aa11797b" containerName="extract-utilities" Dec 05 07:02:07 crc kubenswrapper[4742]: E1205 07:02:07.204329 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88777b04-7e1a-40b8-8618-15a1aa11797b" containerName="extract-content" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.204342 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="88777b04-7e1a-40b8-8618-15a1aa11797b" containerName="extract-content" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.204595 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="88777b04-7e1a-40b8-8618-15a1aa11797b" containerName="registry-server" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.206607 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.222409 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ts8tc"] Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.288129 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mdf4\" (UniqueName: \"kubernetes.io/projected/e79260e1-b85b-491f-87a7-f82f076f6603-kube-api-access-9mdf4\") pod \"redhat-operators-ts8tc\" (UID: \"e79260e1-b85b-491f-87a7-f82f076f6603\") " pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.288198 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e79260e1-b85b-491f-87a7-f82f076f6603-utilities\") pod \"redhat-operators-ts8tc\" (UID: \"e79260e1-b85b-491f-87a7-f82f076f6603\") " pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.288444 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e79260e1-b85b-491f-87a7-f82f076f6603-catalog-content\") pod \"redhat-operators-ts8tc\" (UID: \"e79260e1-b85b-491f-87a7-f82f076f6603\") " pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.390374 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mdf4\" (UniqueName: \"kubernetes.io/projected/e79260e1-b85b-491f-87a7-f82f076f6603-kube-api-access-9mdf4\") pod \"redhat-operators-ts8tc\" (UID: \"e79260e1-b85b-491f-87a7-f82f076f6603\") " pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.390476 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e79260e1-b85b-491f-87a7-f82f076f6603-utilities\") pod \"redhat-operators-ts8tc\" (UID: \"e79260e1-b85b-491f-87a7-f82f076f6603\") " pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.390588 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e79260e1-b85b-491f-87a7-f82f076f6603-catalog-content\") pod \"redhat-operators-ts8tc\" (UID: \"e79260e1-b85b-491f-87a7-f82f076f6603\") " pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:07 
crc kubenswrapper[4742]: I1205 07:02:07.391086 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e79260e1-b85b-491f-87a7-f82f076f6603-utilities\") pod \"redhat-operators-ts8tc\" (UID: \"e79260e1-b85b-491f-87a7-f82f076f6603\") " pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.391208 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e79260e1-b85b-491f-87a7-f82f076f6603-catalog-content\") pod \"redhat-operators-ts8tc\" (UID: \"e79260e1-b85b-491f-87a7-f82f076f6603\") " pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.420431 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mdf4\" (UniqueName: \"kubernetes.io/projected/e79260e1-b85b-491f-87a7-f82f076f6603-kube-api-access-9mdf4\") pod \"redhat-operators-ts8tc\" (UID: \"e79260e1-b85b-491f-87a7-f82f076f6603\") " pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.552991 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:07 crc kubenswrapper[4742]: I1205 07:02:07.970545 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ts8tc"] Dec 05 07:02:08 crc kubenswrapper[4742]: I1205 07:02:08.459803 4742 generic.go:334] "Generic (PLEG): container finished" podID="e79260e1-b85b-491f-87a7-f82f076f6603" containerID="1f1504d92c2f5b6726b73bb7581820b4a35881c145b0d5f629e98034375aa6e2" exitCode=0 Dec 05 07:02:08 crc kubenswrapper[4742]: I1205 07:02:08.459848 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ts8tc" event={"ID":"e79260e1-b85b-491f-87a7-f82f076f6603","Type":"ContainerDied","Data":"1f1504d92c2f5b6726b73bb7581820b4a35881c145b0d5f629e98034375aa6e2"} Dec 05 07:02:08 crc kubenswrapper[4742]: I1205 07:02:08.459872 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ts8tc" event={"ID":"e79260e1-b85b-491f-87a7-f82f076f6603","Type":"ContainerStarted","Data":"4d9205a1704a2259e0258f14bd2830a5db4d851b96f14bc0584193a0445ebe61"} Dec 05 07:02:09 crc kubenswrapper[4742]: I1205 07:02:09.468640 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ts8tc" event={"ID":"e79260e1-b85b-491f-87a7-f82f076f6603","Type":"ContainerStarted","Data":"95b9ccc12fdfc702f524e2e9467ac1b4d9e2159dbdef58b6960080a4e03c378f"} Dec 05 07:02:10 crc kubenswrapper[4742]: I1205 07:02:10.480628 4742 generic.go:334] "Generic (PLEG): container finished" podID="e79260e1-b85b-491f-87a7-f82f076f6603" containerID="95b9ccc12fdfc702f524e2e9467ac1b4d9e2159dbdef58b6960080a4e03c378f" exitCode=0 Dec 05 07:02:10 crc kubenswrapper[4742]: I1205 07:02:10.480739 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ts8tc" event={"ID":"e79260e1-b85b-491f-87a7-f82f076f6603","Type":"ContainerDied","Data":"95b9ccc12fdfc702f524e2e9467ac1b4d9e2159dbdef58b6960080a4e03c378f"} Dec 05 07:02:11 crc kubenswrapper[4742]: I1205 07:02:11.492172 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ts8tc" 
event={"ID":"e79260e1-b85b-491f-87a7-f82f076f6603","Type":"ContainerStarted","Data":"eff15a7a44ab2ebb50d1ab4b1456ea6fa2833b8b794e9915572c5429e25af51c"} Dec 05 07:02:11 crc kubenswrapper[4742]: I1205 07:02:11.532606 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ts8tc" podStartSLOduration=2.147826003 podStartE2EDuration="4.532587511s" podCreationTimestamp="2025-12-05 07:02:07 +0000 UTC" firstStartedPulling="2025-12-05 07:02:08.461780524 +0000 UTC m=+4204.373915586" lastFinishedPulling="2025-12-05 07:02:10.846542032 +0000 UTC m=+4206.758677094" observedRunningTime="2025-12-05 07:02:11.524479047 +0000 UTC m=+4207.436614189" watchObservedRunningTime="2025-12-05 07:02:11.532587511 +0000 UTC m=+4207.444722583" Dec 05 07:02:16 crc kubenswrapper[4742]: I1205 07:02:16.671409 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:02:16 crc kubenswrapper[4742]: I1205 07:02:16.671983 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:02:17 crc kubenswrapper[4742]: I1205 07:02:17.554538 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:17 crc kubenswrapper[4742]: I1205 07:02:17.556033 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:18 crc kubenswrapper[4742]: I1205 07:02:18.642671 4742 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ts8tc" podUID="e79260e1-b85b-491f-87a7-f82f076f6603" containerName="registry-server" probeResult="failure" output=< Dec 05 07:02:18 crc kubenswrapper[4742]: timeout: failed to connect service ":50051" within 1s Dec 05 07:02:18 crc kubenswrapper[4742]: > Dec 05 07:02:27 crc kubenswrapper[4742]: I1205 07:02:27.617326 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:27 crc kubenswrapper[4742]: I1205 07:02:27.681800 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:27 crc kubenswrapper[4742]: I1205 07:02:27.868988 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ts8tc"] Dec 05 07:02:28 crc kubenswrapper[4742]: I1205 07:02:28.640620 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ts8tc" podUID="e79260e1-b85b-491f-87a7-f82f076f6603" containerName="registry-server" containerID="cri-o://eff15a7a44ab2ebb50d1ab4b1456ea6fa2833b8b794e9915572c5429e25af51c" gracePeriod=2 Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.150613 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.253378 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e79260e1-b85b-491f-87a7-f82f076f6603-catalog-content\") pod \"e79260e1-b85b-491f-87a7-f82f076f6603\" (UID: \"e79260e1-b85b-491f-87a7-f82f076f6603\") " Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.253504 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e79260e1-b85b-491f-87a7-f82f076f6603-utilities\") pod \"e79260e1-b85b-491f-87a7-f82f076f6603\" (UID: \"e79260e1-b85b-491f-87a7-f82f076f6603\") " Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.253636 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mdf4\" (UniqueName: \"kubernetes.io/projected/e79260e1-b85b-491f-87a7-f82f076f6603-kube-api-access-9mdf4\") pod \"e79260e1-b85b-491f-87a7-f82f076f6603\" (UID: \"e79260e1-b85b-491f-87a7-f82f076f6603\") " Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.254397 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e79260e1-b85b-491f-87a7-f82f076f6603-utilities" (OuterVolumeSpecName: "utilities") pod "e79260e1-b85b-491f-87a7-f82f076f6603" (UID: "e79260e1-b85b-491f-87a7-f82f076f6603"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.260873 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e79260e1-b85b-491f-87a7-f82f076f6603-kube-api-access-9mdf4" (OuterVolumeSpecName: "kube-api-access-9mdf4") pod "e79260e1-b85b-491f-87a7-f82f076f6603" (UID: "e79260e1-b85b-491f-87a7-f82f076f6603"). InnerVolumeSpecName "kube-api-access-9mdf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.355421 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e79260e1-b85b-491f-87a7-f82f076f6603-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.355457 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mdf4\" (UniqueName: \"kubernetes.io/projected/e79260e1-b85b-491f-87a7-f82f076f6603-kube-api-access-9mdf4\") on node \"crc\" DevicePath \"\"" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.391897 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e79260e1-b85b-491f-87a7-f82f076f6603-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e79260e1-b85b-491f-87a7-f82f076f6603" (UID: "e79260e1-b85b-491f-87a7-f82f076f6603"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.456990 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e79260e1-b85b-491f-87a7-f82f076f6603-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.660425 4742 generic.go:334] "Generic (PLEG): container finished" podID="e79260e1-b85b-491f-87a7-f82f076f6603" containerID="eff15a7a44ab2ebb50d1ab4b1456ea6fa2833b8b794e9915572c5429e25af51c" exitCode=0 Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.660505 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ts8tc" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.660508 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ts8tc" event={"ID":"e79260e1-b85b-491f-87a7-f82f076f6603","Type":"ContainerDied","Data":"eff15a7a44ab2ebb50d1ab4b1456ea6fa2833b8b794e9915572c5429e25af51c"} Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.661002 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ts8tc" event={"ID":"e79260e1-b85b-491f-87a7-f82f076f6603","Type":"ContainerDied","Data":"4d9205a1704a2259e0258f14bd2830a5db4d851b96f14bc0584193a0445ebe61"} Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.661043 4742 scope.go:117] "RemoveContainer" containerID="eff15a7a44ab2ebb50d1ab4b1456ea6fa2833b8b794e9915572c5429e25af51c" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.696770 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ts8tc"] Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.704244 4742 scope.go:117] "RemoveContainer" containerID="95b9ccc12fdfc702f524e2e9467ac1b4d9e2159dbdef58b6960080a4e03c378f" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.709846 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ts8tc"] Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.736232 4742 scope.go:117] "RemoveContainer" containerID="1f1504d92c2f5b6726b73bb7581820b4a35881c145b0d5f629e98034375aa6e2" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.765866 4742 scope.go:117] "RemoveContainer" containerID="eff15a7a44ab2ebb50d1ab4b1456ea6fa2833b8b794e9915572c5429e25af51c" Dec 05 07:02:30 crc kubenswrapper[4742]: E1205 07:02:30.766600 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eff15a7a44ab2ebb50d1ab4b1456ea6fa2833b8b794e9915572c5429e25af51c\": container with ID starting with eff15a7a44ab2ebb50d1ab4b1456ea6fa2833b8b794e9915572c5429e25af51c not found: ID does not exist" containerID="eff15a7a44ab2ebb50d1ab4b1456ea6fa2833b8b794e9915572c5429e25af51c" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.766667 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eff15a7a44ab2ebb50d1ab4b1456ea6fa2833b8b794e9915572c5429e25af51c"} err="failed to get container status \"eff15a7a44ab2ebb50d1ab4b1456ea6fa2833b8b794e9915572c5429e25af51c\": rpc error: code = NotFound desc = could not find container \"eff15a7a44ab2ebb50d1ab4b1456ea6fa2833b8b794e9915572c5429e25af51c\": container with ID starting with eff15a7a44ab2ebb50d1ab4b1456ea6fa2833b8b794e9915572c5429e25af51c not found: ID does not exist" Dec 05 07:02:30 crc 
kubenswrapper[4742]: I1205 07:02:30.766710 4742 scope.go:117] "RemoveContainer" containerID="95b9ccc12fdfc702f524e2e9467ac1b4d9e2159dbdef58b6960080a4e03c378f" Dec 05 07:02:30 crc kubenswrapper[4742]: E1205 07:02:30.767283 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95b9ccc12fdfc702f524e2e9467ac1b4d9e2159dbdef58b6960080a4e03c378f\": container with ID starting with 95b9ccc12fdfc702f524e2e9467ac1b4d9e2159dbdef58b6960080a4e03c378f not found: ID does not exist" containerID="95b9ccc12fdfc702f524e2e9467ac1b4d9e2159dbdef58b6960080a4e03c378f" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.767324 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95b9ccc12fdfc702f524e2e9467ac1b4d9e2159dbdef58b6960080a4e03c378f"} err="failed to get container status \"95b9ccc12fdfc702f524e2e9467ac1b4d9e2159dbdef58b6960080a4e03c378f\": rpc error: code = NotFound desc = could not find container \"95b9ccc12fdfc702f524e2e9467ac1b4d9e2159dbdef58b6960080a4e03c378f\": container with ID starting with 95b9ccc12fdfc702f524e2e9467ac1b4d9e2159dbdef58b6960080a4e03c378f not found: ID does not exist" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.767356 4742 scope.go:117] "RemoveContainer" containerID="1f1504d92c2f5b6726b73bb7581820b4a35881c145b0d5f629e98034375aa6e2" Dec 05 07:02:30 crc kubenswrapper[4742]: E1205 07:02:30.767983 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f1504d92c2f5b6726b73bb7581820b4a35881c145b0d5f629e98034375aa6e2\": container with ID starting with 1f1504d92c2f5b6726b73bb7581820b4a35881c145b0d5f629e98034375aa6e2 not found: ID does not exist" containerID="1f1504d92c2f5b6726b73bb7581820b4a35881c145b0d5f629e98034375aa6e2" Dec 05 07:02:30 crc kubenswrapper[4742]: I1205 07:02:30.768022 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f1504d92c2f5b6726b73bb7581820b4a35881c145b0d5f629e98034375aa6e2"} err="failed to get container status \"1f1504d92c2f5b6726b73bb7581820b4a35881c145b0d5f629e98034375aa6e2\": rpc error: code = NotFound desc = could not find container \"1f1504d92c2f5b6726b73bb7581820b4a35881c145b0d5f629e98034375aa6e2\": container with ID starting with 1f1504d92c2f5b6726b73bb7581820b4a35881c145b0d5f629e98034375aa6e2 not found: ID does not exist" Dec 05 07:02:32 crc kubenswrapper[4742]: I1205 07:02:32.395432 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e79260e1-b85b-491f-87a7-f82f076f6603" path="/var/lib/kubelet/pods/e79260e1-b85b-491f-87a7-f82f076f6603/volumes" Dec 05 07:02:46 crc kubenswrapper[4742]: I1205 07:02:46.671172 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:02:46 crc kubenswrapper[4742]: I1205 07:02:46.672004 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:02:46 crc kubenswrapper[4742]: I1205 07:02:46.672114 4742 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 07:02:46 crc kubenswrapper[4742]: I1205 07:02:46.673199 4742 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cb9a27d77770ffff4e58c7fb9055f8e2b0e937d58b32eb42a5731a840005f97c"} pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 07:02:46 crc kubenswrapper[4742]: I1205 07:02:46.673309 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://cb9a27d77770ffff4e58c7fb9055f8e2b0e937d58b32eb42a5731a840005f97c" gracePeriod=600 Dec 05 07:02:47 crc kubenswrapper[4742]: I1205 07:02:47.836586 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="cb9a27d77770ffff4e58c7fb9055f8e2b0e937d58b32eb42a5731a840005f97c" exitCode=0 Dec 05 07:02:47 crc kubenswrapper[4742]: I1205 07:02:47.837207 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"cb9a27d77770ffff4e58c7fb9055f8e2b0e937d58b32eb42a5731a840005f97c"} Dec 05 07:02:47 crc kubenswrapper[4742]: I1205 07:02:47.837489 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b"} Dec 05 07:02:47 crc kubenswrapper[4742]: I1205 07:02:47.837531 4742 scope.go:117] "RemoveContainer" containerID="f1bd02a09f64f1f2bf064242acc3c037ca6f1fb5e20715818edc26bb1cc2883e" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.578327 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-f9r8x"] Dec 05 07:04:59 crc kubenswrapper[4742]: E1205 07:04:59.579459 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e79260e1-b85b-491f-87a7-f82f076f6603" containerName="extract-utilities" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.579486 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e79260e1-b85b-491f-87a7-f82f076f6603" containerName="extract-utilities" Dec 05 07:04:59 crc kubenswrapper[4742]: E1205 07:04:59.579510 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e79260e1-b85b-491f-87a7-f82f076f6603" containerName="registry-server" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.579523 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e79260e1-b85b-491f-87a7-f82f076f6603" containerName="registry-server" Dec 05 07:04:59 crc kubenswrapper[4742]: E1205 07:04:59.579580 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e79260e1-b85b-491f-87a7-f82f076f6603" containerName="extract-content" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.579594 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e79260e1-b85b-491f-87a7-f82f076f6603" containerName="extract-content" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.579863 4742 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="e79260e1-b85b-491f-87a7-f82f076f6603" containerName="registry-server" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.583338 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.600222 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-f9r8x"] Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.771764 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7185d504-d8ed-450a-af74-ac742b24bb6d-utilities\") pod \"certified-operators-f9r8x\" (UID: \"7185d504-d8ed-450a-af74-ac742b24bb6d\") " pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.771955 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7185d504-d8ed-450a-af74-ac742b24bb6d-catalog-content\") pod \"certified-operators-f9r8x\" (UID: \"7185d504-d8ed-450a-af74-ac742b24bb6d\") " pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.772044 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ws25\" (UniqueName: \"kubernetes.io/projected/7185d504-d8ed-450a-af74-ac742b24bb6d-kube-api-access-2ws25\") pod \"certified-operators-f9r8x\" (UID: \"7185d504-d8ed-450a-af74-ac742b24bb6d\") " pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.874928 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7185d504-d8ed-450a-af74-ac742b24bb6d-catalog-content\") pod \"certified-operators-f9r8x\" (UID: \"7185d504-d8ed-450a-af74-ac742b24bb6d\") " pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.875018 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ws25\" (UniqueName: \"kubernetes.io/projected/7185d504-d8ed-450a-af74-ac742b24bb6d-kube-api-access-2ws25\") pod \"certified-operators-f9r8x\" (UID: \"7185d504-d8ed-450a-af74-ac742b24bb6d\") " pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.875141 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7185d504-d8ed-450a-af74-ac742b24bb6d-utilities\") pod \"certified-operators-f9r8x\" (UID: \"7185d504-d8ed-450a-af74-ac742b24bb6d\") " pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.875777 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7185d504-d8ed-450a-af74-ac742b24bb6d-utilities\") pod \"certified-operators-f9r8x\" (UID: \"7185d504-d8ed-450a-af74-ac742b24bb6d\") " pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.875984 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7185d504-d8ed-450a-af74-ac742b24bb6d-catalog-content\") pod \"certified-operators-f9r8x\" (UID: 
\"7185d504-d8ed-450a-af74-ac742b24bb6d\") " pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.906562 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ws25\" (UniqueName: \"kubernetes.io/projected/7185d504-d8ed-450a-af74-ac742b24bb6d-kube-api-access-2ws25\") pod \"certified-operators-f9r8x\" (UID: \"7185d504-d8ed-450a-af74-ac742b24bb6d\") " pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:04:59 crc kubenswrapper[4742]: I1205 07:04:59.912308 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:05:00 crc kubenswrapper[4742]: I1205 07:05:00.436172 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-f9r8x"] Dec 05 07:05:01 crc kubenswrapper[4742]: I1205 07:05:01.155408 4742 generic.go:334] "Generic (PLEG): container finished" podID="7185d504-d8ed-450a-af74-ac742b24bb6d" containerID="bc7f6d0fa881804bcb557c0a7c9895f56205b835fd5efc14df244028ad667af1" exitCode=0 Dec 05 07:05:01 crc kubenswrapper[4742]: I1205 07:05:01.155602 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f9r8x" event={"ID":"7185d504-d8ed-450a-af74-ac742b24bb6d","Type":"ContainerDied","Data":"bc7f6d0fa881804bcb557c0a7c9895f56205b835fd5efc14df244028ad667af1"} Dec 05 07:05:01 crc kubenswrapper[4742]: I1205 07:05:01.155835 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f9r8x" event={"ID":"7185d504-d8ed-450a-af74-ac742b24bb6d","Type":"ContainerStarted","Data":"4ad88ae1fb317af38f69015b4c07b274771a5137a1ffd62ea63ee331973ba1ea"} Dec 05 07:05:02 crc kubenswrapper[4742]: I1205 07:05:02.170681 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f9r8x" event={"ID":"7185d504-d8ed-450a-af74-ac742b24bb6d","Type":"ContainerStarted","Data":"b523bd603c6de04107b4733fdb0d254cbc701534b6749bd14343ef729972ec0a"} Dec 05 07:05:03 crc kubenswrapper[4742]: I1205 07:05:03.183000 4742 generic.go:334] "Generic (PLEG): container finished" podID="7185d504-d8ed-450a-af74-ac742b24bb6d" containerID="b523bd603c6de04107b4733fdb0d254cbc701534b6749bd14343ef729972ec0a" exitCode=0 Dec 05 07:05:03 crc kubenswrapper[4742]: I1205 07:05:03.183133 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f9r8x" event={"ID":"7185d504-d8ed-450a-af74-ac742b24bb6d","Type":"ContainerDied","Data":"b523bd603c6de04107b4733fdb0d254cbc701534b6749bd14343ef729972ec0a"} Dec 05 07:05:04 crc kubenswrapper[4742]: I1205 07:05:04.196200 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f9r8x" event={"ID":"7185d504-d8ed-450a-af74-ac742b24bb6d","Type":"ContainerStarted","Data":"04c6d3b06776bceaf7510f53f8cbc378ccb1e4e98921c6240dccec4507f9b75f"} Dec 05 07:05:04 crc kubenswrapper[4742]: I1205 07:05:04.229744 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-f9r8x" podStartSLOduration=2.768543156 podStartE2EDuration="5.229720171s" podCreationTimestamp="2025-12-05 07:04:59 +0000 UTC" firstStartedPulling="2025-12-05 07:05:01.1610103 +0000 UTC m=+4377.073145372" lastFinishedPulling="2025-12-05 07:05:03.622187295 +0000 UTC m=+4379.534322387" observedRunningTime="2025-12-05 07:05:04.220133038 +0000 UTC m=+4380.132268140" 
watchObservedRunningTime="2025-12-05 07:05:04.229720171 +0000 UTC m=+4380.141855273" Dec 05 07:05:09 crc kubenswrapper[4742]: I1205 07:05:09.913264 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:05:09 crc kubenswrapper[4742]: I1205 07:05:09.914398 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:05:09 crc kubenswrapper[4742]: I1205 07:05:09.979697 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:05:10 crc kubenswrapper[4742]: I1205 07:05:10.308266 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:05:10 crc kubenswrapper[4742]: I1205 07:05:10.376652 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-f9r8x"] Dec 05 07:05:12 crc kubenswrapper[4742]: I1205 07:05:12.280226 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-f9r8x" podUID="7185d504-d8ed-450a-af74-ac742b24bb6d" containerName="registry-server" containerID="cri-o://04c6d3b06776bceaf7510f53f8cbc378ccb1e4e98921c6240dccec4507f9b75f" gracePeriod=2 Dec 05 07:05:13 crc kubenswrapper[4742]: I1205 07:05:13.294697 4742 generic.go:334] "Generic (PLEG): container finished" podID="7185d504-d8ed-450a-af74-ac742b24bb6d" containerID="04c6d3b06776bceaf7510f53f8cbc378ccb1e4e98921c6240dccec4507f9b75f" exitCode=0 Dec 05 07:05:13 crc kubenswrapper[4742]: I1205 07:05:13.294850 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f9r8x" event={"ID":"7185d504-d8ed-450a-af74-ac742b24bb6d","Type":"ContainerDied","Data":"04c6d3b06776bceaf7510f53f8cbc378ccb1e4e98921c6240dccec4507f9b75f"} Dec 05 07:05:13 crc kubenswrapper[4742]: I1205 07:05:13.886424 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.002440 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7185d504-d8ed-450a-af74-ac742b24bb6d-utilities\") pod \"7185d504-d8ed-450a-af74-ac742b24bb6d\" (UID: \"7185d504-d8ed-450a-af74-ac742b24bb6d\") " Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.002542 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ws25\" (UniqueName: \"kubernetes.io/projected/7185d504-d8ed-450a-af74-ac742b24bb6d-kube-api-access-2ws25\") pod \"7185d504-d8ed-450a-af74-ac742b24bb6d\" (UID: \"7185d504-d8ed-450a-af74-ac742b24bb6d\") " Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.002599 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7185d504-d8ed-450a-af74-ac742b24bb6d-catalog-content\") pod \"7185d504-d8ed-450a-af74-ac742b24bb6d\" (UID: \"7185d504-d8ed-450a-af74-ac742b24bb6d\") " Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.003572 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7185d504-d8ed-450a-af74-ac742b24bb6d-utilities" (OuterVolumeSpecName: "utilities") pod "7185d504-d8ed-450a-af74-ac742b24bb6d" (UID: "7185d504-d8ed-450a-af74-ac742b24bb6d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.009019 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7185d504-d8ed-450a-af74-ac742b24bb6d-kube-api-access-2ws25" (OuterVolumeSpecName: "kube-api-access-2ws25") pod "7185d504-d8ed-450a-af74-ac742b24bb6d" (UID: "7185d504-d8ed-450a-af74-ac742b24bb6d"). InnerVolumeSpecName "kube-api-access-2ws25". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.081021 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7185d504-d8ed-450a-af74-ac742b24bb6d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7185d504-d8ed-450a-af74-ac742b24bb6d" (UID: "7185d504-d8ed-450a-af74-ac742b24bb6d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.104102 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7185d504-d8ed-450a-af74-ac742b24bb6d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.104138 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ws25\" (UniqueName: \"kubernetes.io/projected/7185d504-d8ed-450a-af74-ac742b24bb6d-kube-api-access-2ws25\") on node \"crc\" DevicePath \"\"" Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.104152 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7185d504-d8ed-450a-af74-ac742b24bb6d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.308236 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-f9r8x" event={"ID":"7185d504-d8ed-450a-af74-ac742b24bb6d","Type":"ContainerDied","Data":"4ad88ae1fb317af38f69015b4c07b274771a5137a1ffd62ea63ee331973ba1ea"} Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.308329 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-f9r8x" Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.308337 4742 scope.go:117] "RemoveContainer" containerID="04c6d3b06776bceaf7510f53f8cbc378ccb1e4e98921c6240dccec4507f9b75f" Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.353576 4742 scope.go:117] "RemoveContainer" containerID="b523bd603c6de04107b4733fdb0d254cbc701534b6749bd14343ef729972ec0a" Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.363854 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-f9r8x"] Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.374566 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-f9r8x"] Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.390465 4742 scope.go:117] "RemoveContainer" containerID="bc7f6d0fa881804bcb557c0a7c9895f56205b835fd5efc14df244028ad667af1" Dec 05 07:05:14 crc kubenswrapper[4742]: I1205 07:05:14.398086 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7185d504-d8ed-450a-af74-ac742b24bb6d" path="/var/lib/kubelet/pods/7185d504-d8ed-450a-af74-ac742b24bb6d/volumes" Dec 05 07:05:16 crc kubenswrapper[4742]: I1205 07:05:16.671427 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:05:16 crc kubenswrapper[4742]: I1205 07:05:16.672937 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:05:46 crc kubenswrapper[4742]: I1205 07:05:46.671219 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": 
Dec 05 07:05:46 crc kubenswrapper[4742]: I1205 07:05:46.671730 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.000976 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-z4nsk/must-gather-tgwzw"]
Dec 05 07:05:55 crc kubenswrapper[4742]: E1205 07:05:55.001719 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7185d504-d8ed-450a-af74-ac742b24bb6d" containerName="extract-content"
Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.001731 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7185d504-d8ed-450a-af74-ac742b24bb6d" containerName="extract-content"
Dec 05 07:05:55 crc kubenswrapper[4742]: E1205 07:05:55.001746 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7185d504-d8ed-450a-af74-ac742b24bb6d" containerName="registry-server"
Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.001752 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7185d504-d8ed-450a-af74-ac742b24bb6d" containerName="registry-server"
Dec 05 07:05:55 crc kubenswrapper[4742]: E1205 07:05:55.001774 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7185d504-d8ed-450a-af74-ac742b24bb6d" containerName="extract-utilities"
Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.001781 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="7185d504-d8ed-450a-af74-ac742b24bb6d" containerName="extract-utilities"
Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.001903 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="7185d504-d8ed-450a-af74-ac742b24bb6d" containerName="registry-server"
Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.002617 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-z4nsk/must-gather-tgwzw"
Need to start a new one" pod="openshift-must-gather-z4nsk/must-gather-tgwzw" Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.004982 4742 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-z4nsk"/"default-dockercfg-tfnpb" Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.005459 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-z4nsk"/"kube-root-ca.crt" Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.005730 4742 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-z4nsk"/"openshift-service-ca.crt" Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.011718 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-z4nsk/must-gather-tgwzw"] Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.138148 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjjzv\" (UniqueName: \"kubernetes.io/projected/e5b216b7-f4e7-4441-83b2-22cb654aad9e-kube-api-access-pjjzv\") pod \"must-gather-tgwzw\" (UID: \"e5b216b7-f4e7-4441-83b2-22cb654aad9e\") " pod="openshift-must-gather-z4nsk/must-gather-tgwzw" Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.138207 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e5b216b7-f4e7-4441-83b2-22cb654aad9e-must-gather-output\") pod \"must-gather-tgwzw\" (UID: \"e5b216b7-f4e7-4441-83b2-22cb654aad9e\") " pod="openshift-must-gather-z4nsk/must-gather-tgwzw" Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.239425 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjjzv\" (UniqueName: \"kubernetes.io/projected/e5b216b7-f4e7-4441-83b2-22cb654aad9e-kube-api-access-pjjzv\") pod \"must-gather-tgwzw\" (UID: \"e5b216b7-f4e7-4441-83b2-22cb654aad9e\") " pod="openshift-must-gather-z4nsk/must-gather-tgwzw" Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.239496 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e5b216b7-f4e7-4441-83b2-22cb654aad9e-must-gather-output\") pod \"must-gather-tgwzw\" (UID: \"e5b216b7-f4e7-4441-83b2-22cb654aad9e\") " pod="openshift-must-gather-z4nsk/must-gather-tgwzw" Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.240422 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e5b216b7-f4e7-4441-83b2-22cb654aad9e-must-gather-output\") pod \"must-gather-tgwzw\" (UID: \"e5b216b7-f4e7-4441-83b2-22cb654aad9e\") " pod="openshift-must-gather-z4nsk/must-gather-tgwzw" Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.257939 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjjzv\" (UniqueName: \"kubernetes.io/projected/e5b216b7-f4e7-4441-83b2-22cb654aad9e-kube-api-access-pjjzv\") pod \"must-gather-tgwzw\" (UID: \"e5b216b7-f4e7-4441-83b2-22cb654aad9e\") " pod="openshift-must-gather-z4nsk/must-gather-tgwzw" Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.324758 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-z4nsk/must-gather-tgwzw" Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.614008 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-z4nsk/must-gather-tgwzw"] Dec 05 07:05:55 crc kubenswrapper[4742]: W1205 07:05:55.626585 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5b216b7_f4e7_4441_83b2_22cb654aad9e.slice/crio-01f25b9cd956bdd4f81ffbef08091cdbd7e8f0903941b7242ef77c76787adafd WatchSource:0}: Error finding container 01f25b9cd956bdd4f81ffbef08091cdbd7e8f0903941b7242ef77c76787adafd: Status 404 returned error can't find the container with id 01f25b9cd956bdd4f81ffbef08091cdbd7e8f0903941b7242ef77c76787adafd Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.629595 4742 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 07:05:55 crc kubenswrapper[4742]: I1205 07:05:55.855305 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-z4nsk/must-gather-tgwzw" event={"ID":"e5b216b7-f4e7-4441-83b2-22cb654aad9e","Type":"ContainerStarted","Data":"01f25b9cd956bdd4f81ffbef08091cdbd7e8f0903941b7242ef77c76787adafd"} Dec 05 07:06:00 crc kubenswrapper[4742]: I1205 07:06:00.915824 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-z4nsk/must-gather-tgwzw" event={"ID":"e5b216b7-f4e7-4441-83b2-22cb654aad9e","Type":"ContainerStarted","Data":"4514710bf6931e4c25b80eb46bbfff5aa018594e24cf22c44c3e05848cdd4380"} Dec 05 07:06:00 crc kubenswrapper[4742]: I1205 07:06:00.916355 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-z4nsk/must-gather-tgwzw" event={"ID":"e5b216b7-f4e7-4441-83b2-22cb654aad9e","Type":"ContainerStarted","Data":"54fab92f1b75eabf94a4d5335fb108e672ae43848c78810d045dcbf79388a8d3"} Dec 05 07:06:00 crc kubenswrapper[4742]: I1205 07:06:00.938848 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-z4nsk/must-gather-tgwzw" podStartSLOduration=2.424348749 podStartE2EDuration="6.938827384s" podCreationTimestamp="2025-12-05 07:05:54 +0000 UTC" firstStartedPulling="2025-12-05 07:05:55.62921496 +0000 UTC m=+4431.541350052" lastFinishedPulling="2025-12-05 07:06:00.143693625 +0000 UTC m=+4436.055828687" observedRunningTime="2025-12-05 07:06:00.933687878 +0000 UTC m=+4436.845822980" watchObservedRunningTime="2025-12-05 07:06:00.938827384 +0000 UTC m=+4436.850962456" Dec 05 07:06:16 crc kubenswrapper[4742]: I1205 07:06:16.671472 4742 patch_prober.go:28] interesting pod/machine-config-daemon-7q8lw container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 07:06:16 crc kubenswrapper[4742]: I1205 07:06:16.672178 4742 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 07:06:16 crc kubenswrapper[4742]: I1205 07:06:16.672248 4742 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" Dec 05 07:06:16 crc kubenswrapper[4742]: I1205 
Dec 05 07:06:16 crc kubenswrapper[4742]: I1205 07:06:16.673518 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerName="machine-config-daemon" containerID="cri-o://9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" gracePeriod=600
Dec 05 07:06:17 crc kubenswrapper[4742]: E1205 07:06:17.899754 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5"
Dec 05 07:06:18 crc kubenswrapper[4742]: I1205 07:06:18.043926 4742 generic.go:334] "Generic (PLEG): container finished" podID="3fc0b032-e995-4d0f-b5e7-600b880849f5" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" exitCode=0
Dec 05 07:06:18 crc kubenswrapper[4742]: I1205 07:06:18.043996 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerDied","Data":"9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b"}
Dec 05 07:06:18 crc kubenswrapper[4742]: I1205 07:06:18.044058 4742 scope.go:117] "RemoveContainer" containerID="cb9a27d77770ffff4e58c7fb9055f8e2b0e937d58b32eb42a5731a840005f97c"
Dec 05 07:06:18 crc kubenswrapper[4742]: I1205 07:06:18.044499 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b"
Dec 05 07:06:18 crc kubenswrapper[4742]: E1205 07:06:18.044728 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5"
Dec 05 07:06:30 crc kubenswrapper[4742]: I1205 07:06:30.382423 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b"
Dec 05 07:06:30 crc kubenswrapper[4742]: E1205 07:06:30.383031 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5"
Dec 05 07:06:45 crc kubenswrapper[4742]: I1205 07:06:45.382866 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b"
containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:06:45 crc kubenswrapper[4742]: E1205 07:06:45.383866 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.014927 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46_8a14d1cc-1601-458f-97f6-01c3d6a95510/util/0.log" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.174302 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46_8a14d1cc-1601-458f-97f6-01c3d6a95510/pull/0.log" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.197364 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46_8a14d1cc-1601-458f-97f6-01c3d6a95510/util/0.log" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.205257 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46_8a14d1cc-1601-458f-97f6-01c3d6a95510/pull/0.log" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.326390 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46_8a14d1cc-1601-458f-97f6-01c3d6a95510/util/0.log" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.344911 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46_8a14d1cc-1601-458f-97f6-01c3d6a95510/pull/0.log" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.382974 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:06:59 crc kubenswrapper[4742]: E1205 07:06:59.383307 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.398514 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_7ee1904b0b36c5ac2b910f2b6a74ab6f6a37cc37d391b42d90c2aa632abbb46_8a14d1cc-1601-458f-97f6-01c3d6a95510/extract/0.log" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.494729 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-gmqkf_491b9b94-2e41-4c0b-8286-6c7c8b460933/kube-rbac-proxy/0.log" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.564215 4742 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-gmqkf_491b9b94-2e41-4c0b-8286-6c7c8b460933/manager/0.log" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.588552 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-7t6zt_d1b68a24-f581-4b06-a05a-be291467b34b/kube-rbac-proxy/0.log" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.725300 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-7t6zt_d1b68a24-f581-4b06-a05a-be291467b34b/manager/0.log" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.741595 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-dp5sb_55a3d509-dd87-42fb-be01-6cdd6ffcc70c/kube-rbac-proxy/0.log" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.799253 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-dp5sb_55a3d509-dd87-42fb-be01-6cdd6ffcc70c/manager/0.log" Dec 05 07:06:59 crc kubenswrapper[4742]: I1205 07:06:59.917670 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-2c5fr_c57dd655-4793-45cd-9e28-ebf4793af611/kube-rbac-proxy/0.log" Dec 05 07:07:00 crc kubenswrapper[4742]: I1205 07:07:00.021963 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-2c5fr_c57dd655-4793-45cd-9e28-ebf4793af611/manager/0.log" Dec 05 07:07:00 crc kubenswrapper[4742]: I1205 07:07:00.118757 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-2sqlp_6b229432-3291-4696-bc76-eda16eda1a3d/manager/0.log" Dec 05 07:07:00 crc kubenswrapper[4742]: I1205 07:07:00.141604 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-2sqlp_6b229432-3291-4696-bc76-eda16eda1a3d/kube-rbac-proxy/0.log" Dec 05 07:07:00 crc kubenswrapper[4742]: I1205 07:07:00.239929 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-9nxmx_030626ef-00d4-4b99-b629-0b25c15c2c55/kube-rbac-proxy/0.log" Dec 05 07:07:00 crc kubenswrapper[4742]: I1205 07:07:00.269253 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-9nxmx_030626ef-00d4-4b99-b629-0b25c15c2c55/manager/0.log" Dec 05 07:07:00 crc kubenswrapper[4742]: I1205 07:07:00.421602 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-758b7cbd9c-4bwlv_9d7f230e-fc9c-46a0-b31f-2b0772107ebb/kube-rbac-proxy/0.log" Dec 05 07:07:00 crc kubenswrapper[4742]: I1205 07:07:00.532087 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-d77rg_d8579acb-f382-474b-94ae-86a304ddcaec/kube-rbac-proxy/0.log" Dec 05 07:07:00 crc kubenswrapper[4742]: I1205 07:07:00.575910 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-758b7cbd9c-4bwlv_9d7f230e-fc9c-46a0-b31f-2b0772107ebb/manager/0.log" Dec 05 07:07:00 crc kubenswrapper[4742]: I1205 07:07:00.618784 4742 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-d77rg_d8579acb-f382-474b-94ae-86a304ddcaec/manager/0.log" Dec 05 07:07:00 crc kubenswrapper[4742]: I1205 07:07:00.672285 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-kbvkg_b9dba9a5-804f-4b60-9e89-0e9dfeba1d44/kube-rbac-proxy/0.log" Dec 05 07:07:00 crc kubenswrapper[4742]: I1205 07:07:00.813262 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-kbvkg_b9dba9a5-804f-4b60-9e89-0e9dfeba1d44/manager/0.log" Dec 05 07:07:00 crc kubenswrapper[4742]: I1205 07:07:00.851045 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-m2g95_93299aa7-920e-4725-9546-1376e21f8652/kube-rbac-proxy/0.log" Dec 05 07:07:00 crc kubenswrapper[4742]: I1205 07:07:00.926650 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-m2g95_93299aa7-920e-4725-9546-1376e21f8652/manager/0.log" Dec 05 07:07:01 crc kubenswrapper[4742]: I1205 07:07:01.019904 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-4xvmq_3cd456e8-3d67-43bb-9aaf-006acae0a913/kube-rbac-proxy/0.log" Dec 05 07:07:01 crc kubenswrapper[4742]: I1205 07:07:01.074825 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-4xvmq_3cd456e8-3d67-43bb-9aaf-006acae0a913/manager/0.log" Dec 05 07:07:01 crc kubenswrapper[4742]: I1205 07:07:01.173113 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-2qgvf_87b6fb22-4077-4dfa-a66c-10ef740b542c/kube-rbac-proxy/0.log" Dec 05 07:07:01 crc kubenswrapper[4742]: I1205 07:07:01.230127 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-2qgvf_87b6fb22-4077-4dfa-a66c-10ef740b542c/manager/0.log" Dec 05 07:07:01 crc kubenswrapper[4742]: I1205 07:07:01.304148 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-mq7jm_dbf44717-3f12-426c-9133-ef0dd76cea1a/kube-rbac-proxy/0.log" Dec 05 07:07:01 crc kubenswrapper[4742]: I1205 07:07:01.404623 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-mq7jm_dbf44717-3f12-426c-9133-ef0dd76cea1a/manager/0.log" Dec 05 07:07:01 crc kubenswrapper[4742]: I1205 07:07:01.487641 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-xxqrg_47af7008-5488-4a6a-836a-602844f186c9/manager/0.log" Dec 05 07:07:01 crc kubenswrapper[4742]: I1205 07:07:01.489394 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-xxqrg_47af7008-5488-4a6a-836a-602844f186c9/kube-rbac-proxy/0.log" Dec 05 07:07:01 crc kubenswrapper[4742]: I1205 07:07:01.622669 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84b575879fngsq9_96e75197-2f06-41ef-acca-0752e684ab72/kube-rbac-proxy/0.log" Dec 05 07:07:01 crc kubenswrapper[4742]: I1205 
Dec 05 07:07:02 crc kubenswrapper[4742]: I1205 07:07:02.057298 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-554dbdfbd5-vlt2c_d3a0217e-be86-4205-a03c-fc3a3c603ebd/operator/0.log"
Dec 05 07:07:02 crc kubenswrapper[4742]: I1205 07:07:02.071029 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-wf5gb_9f4bfd9b-9056-4096-804c-5cdf8a6a29d4/registry-server/0.log"
Dec 05 07:07:02 crc kubenswrapper[4742]: I1205 07:07:02.232019 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-ml2jd_7296ffb7-3049-44eb-80d1-850817ee1fac/kube-rbac-proxy/0.log"
Dec 05 07:07:02 crc kubenswrapper[4742]: I1205 07:07:02.352604 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-ml2jd_7296ffb7-3049-44eb-80d1-850817ee1fac/manager/0.log"
Dec 05 07:07:02 crc kubenswrapper[4742]: I1205 07:07:02.407165 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6f6696b64-d7l2w_1b690049-7bae-4629-8183-02c87c0fe640/manager/0.log"
Dec 05 07:07:02 crc kubenswrapper[4742]: I1205 07:07:02.424276 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-8vrd9_8b68f95e-f3d8-4e0e-a1a7-f5769e47f3b1/kube-rbac-proxy/0.log"
Dec 05 07:07:02 crc kubenswrapper[4742]: I1205 07:07:02.494889 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-8vrd9_8b68f95e-f3d8-4e0e-a1a7-f5769e47f3b1/manager/0.log"
Dec 05 07:07:02 crc kubenswrapper[4742]: I1205 07:07:02.616475 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-5nmbr_a66da454-e3a7-436a-88d0-05bcf3e954eb/operator/0.log"
Dec 05 07:07:02 crc kubenswrapper[4742]: I1205 07:07:02.632388 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-xvrrk_80c34a09-8c71-40d2-828e-b5e416ca4e5d/kube-rbac-proxy/0.log"
Dec 05 07:07:02 crc kubenswrapper[4742]: I1205 07:07:02.729912 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-xvrrk_80c34a09-8c71-40d2-828e-b5e416ca4e5d/manager/0.log"
Dec 05 07:07:02 crc kubenswrapper[4742]: I1205 07:07:02.785523 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-58d5ff84df-tqxd6_1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f/kube-rbac-proxy/0.log"
Dec 05 07:07:02 crc kubenswrapper[4742]: I1205 07:07:02.885733 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-58d5ff84df-tqxd6_1b8c3dc7-6086-4c81-a1f7-a9bea62a8a4f/manager/0.log"
Dec 05 07:07:02 crc kubenswrapper[4742]: I1205 07:07:02.905110 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-xb5zg_961ed339-23e1-4d90-a5b9-f0fcdd73df76/kube-rbac-proxy/0.log"
Dec 05 07:07:02 crc kubenswrapper[4742]: I1205 07:07:02.974630 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-xb5zg_961ed339-23e1-4d90-a5b9-f0fcdd73df76/manager/0.log"
Dec 05 07:07:03 crc kubenswrapper[4742]: I1205 07:07:03.059728 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-667bd8d554-rc6hb_47ef9cc3-82c3-4874-8fbf-9799bb2a8b4c/kube-rbac-proxy/0.log"
Dec 05 07:07:03 crc kubenswrapper[4742]: I1205 07:07:03.070407 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-667bd8d554-rc6hb_47ef9cc3-82c3-4874-8fbf-9799bb2a8b4c/manager/0.log"
Dec 05 07:07:13 crc kubenswrapper[4742]: I1205 07:07:13.383151 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b"
Dec 05 07:07:13 crc kubenswrapper[4742]: E1205 07:07:13.383946 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5"
Dec 05 07:07:22 crc kubenswrapper[4742]: I1205 07:07:22.000246 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-rsr6b_93cbfc1e-ff82-4309-a7a8-dd57f1fc6616/control-plane-machine-set-operator/0.log"
Dec 05 07:07:22 crc kubenswrapper[4742]: I1205 07:07:22.220334 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zk4vj_518731b7-0f61-40b4-ad6c-c49383c0dd5b/kube-rbac-proxy/0.log"
Dec 05 07:07:22 crc kubenswrapper[4742]: I1205 07:07:22.318785 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-zk4vj_518731b7-0f61-40b4-ad6c-c49383c0dd5b/machine-api-operator/0.log"
Dec 05 07:07:24 crc kubenswrapper[4742]: I1205 07:07:24.385996 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b"
Dec 05 07:07:24 crc kubenswrapper[4742]: E1205 07:07:24.386553 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5"
Dec 05 07:07:36 crc kubenswrapper[4742]: I1205 07:07:36.079793 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-rd89x_9e4497f9-5c79-4ab4-b6fd-501ba13e3ede/cert-manager-controller/0.log"
Dec 05 07:07:36 crc kubenswrapper[4742]: I1205 07:07:36.264392 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-kvfb8_00f1d295-623d-4eb6-949b-f51674189d91/cert-manager-cainjector/0.log"
Dec 05 07:07:36 crc kubenswrapper[4742]: I1205 07:07:36.304457 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-6ggsv_74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24/cert-manager-webhook/0.log"
path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-6ggsv_74f061b9-beb2-4a01-bfd1-d0b1ad1ffe24/cert-manager-webhook/0.log" Dec 05 07:07:37 crc kubenswrapper[4742]: I1205 07:07:37.383032 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:07:37 crc kubenswrapper[4742]: E1205 07:07:37.383298 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:07:49 crc kubenswrapper[4742]: I1205 07:07:49.382627 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:07:49 crc kubenswrapper[4742]: E1205 07:07:49.383422 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:07:49 crc kubenswrapper[4742]: I1205 07:07:49.690721 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-jsbxs_b2b02f02-8470-4c8a-9f75-774905266432/nmstate-console-plugin/0.log" Dec 05 07:07:49 crc kubenswrapper[4742]: I1205 07:07:49.847047 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-l8d88_dc1bcd64-15c2-4fec-ac72-167371e50892/nmstate-handler/0.log" Dec 05 07:07:49 crc kubenswrapper[4742]: I1205 07:07:49.888507 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-4xvgt_e78be74b-15df-44da-9b82-909e008442b0/kube-rbac-proxy/0.log" Dec 05 07:07:49 crc kubenswrapper[4742]: I1205 07:07:49.937184 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-4xvgt_e78be74b-15df-44da-9b82-909e008442b0/nmstate-metrics/0.log" Dec 05 07:07:50 crc kubenswrapper[4742]: I1205 07:07:50.037001 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-n5mqx_fc296790-fea6-441b-93fc-6e4caed21ba3/nmstate-operator/0.log" Dec 05 07:07:50 crc kubenswrapper[4742]: I1205 07:07:50.097717 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-7zxqr_35a6361c-11cd-440d-ad6e-93929d21e8f2/nmstate-webhook/0.log" Dec 05 07:08:04 crc kubenswrapper[4742]: I1205 07:08:04.386972 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:08:04 crc kubenswrapper[4742]: E1205 07:08:04.387615 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" 
podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:08:04 crc kubenswrapper[4742]: I1205 07:08:04.502729 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-4rdck_4f231cac-c8b3-4d09-a4db-b936ea626b09/kube-rbac-proxy/0.log" Dec 05 07:08:04 crc kubenswrapper[4742]: I1205 07:08:04.691637 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/cp-frr-files/0.log" Dec 05 07:08:04 crc kubenswrapper[4742]: I1205 07:08:04.923137 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-4rdck_4f231cac-c8b3-4d09-a4db-b936ea626b09/controller/0.log" Dec 05 07:08:04 crc kubenswrapper[4742]: I1205 07:08:04.925652 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/cp-reloader/0.log" Dec 05 07:08:04 crc kubenswrapper[4742]: I1205 07:08:04.966848 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/cp-frr-files/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.011455 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/cp-metrics/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.101330 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/cp-reloader/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.237645 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/cp-reloader/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.242168 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/cp-frr-files/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.259835 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/cp-metrics/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.303517 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/cp-metrics/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.482543 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/cp-metrics/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.500091 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/cp-frr-files/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.501008 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/cp-reloader/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.504987 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/controller/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.660723 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/frr-metrics/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.672541 4742 log.go:25] "Finished 
parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/kube-rbac-proxy/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.746194 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/kube-rbac-proxy-frr/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.911073 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/reloader/0.log" Dec 05 07:08:05 crc kubenswrapper[4742]: I1205 07:08:05.993415 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-qnzzl_1e968846-ee36-497c-b325-b3fb9a719dd5/frr-k8s-webhook-server/0.log" Dec 05 07:08:06 crc kubenswrapper[4742]: I1205 07:08:06.182063 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-58c6bb7977-9wmg7_1aabe43c-f43a-4355-bff2-0cea43761b1f/manager/0.log" Dec 05 07:08:06 crc kubenswrapper[4742]: I1205 07:08:06.333668 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-c7469647b-vlc7v_811775cd-1d30-4b17-aa34-63ce86817f71/webhook-server/0.log" Dec 05 07:08:06 crc kubenswrapper[4742]: I1205 07:08:06.430724 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-hv4z4_6e970302-163f-4c96-9be1-740136174111/kube-rbac-proxy/0.log" Dec 05 07:08:06 crc kubenswrapper[4742]: I1205 07:08:06.871955 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2mwsj_eb609210-85a4-48b1-94fc-bd35d13b9c3d/frr/0.log" Dec 05 07:08:06 crc kubenswrapper[4742]: I1205 07:08:06.903796 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-hv4z4_6e970302-163f-4c96-9be1-740136174111/speaker/0.log" Dec 05 07:08:15 crc kubenswrapper[4742]: I1205 07:08:15.382145 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:08:15 crc kubenswrapper[4742]: E1205 07:08:15.382820 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:08:19 crc kubenswrapper[4742]: I1205 07:08:19.797908 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs_2401898b-c4d3-4155-b6af-4ed889d837d5/util/0.log" Dec 05 07:08:19 crc kubenswrapper[4742]: I1205 07:08:19.970199 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs_2401898b-c4d3-4155-b6af-4ed889d837d5/util/0.log" Dec 05 07:08:19 crc kubenswrapper[4742]: I1205 07:08:19.987788 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs_2401898b-c4d3-4155-b6af-4ed889d837d5/pull/0.log" Dec 05 07:08:20 crc kubenswrapper[4742]: I1205 07:08:20.005967 4742 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs_2401898b-c4d3-4155-b6af-4ed889d837d5/pull/0.log" Dec 05 07:08:20 crc kubenswrapper[4742]: I1205 07:08:20.212854 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs_2401898b-c4d3-4155-b6af-4ed889d837d5/extract/0.log" Dec 05 07:08:20 crc kubenswrapper[4742]: I1205 07:08:20.220092 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs_2401898b-c4d3-4155-b6af-4ed889d837d5/util/0.log" Dec 05 07:08:20 crc kubenswrapper[4742]: I1205 07:08:20.243097 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931amkdgs_2401898b-c4d3-4155-b6af-4ed889d837d5/pull/0.log" Dec 05 07:08:20 crc kubenswrapper[4742]: I1205 07:08:20.366243 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4_19fa719f-3ace-451f-ba24-3c9a3fc6bc2b/util/0.log" Dec 05 07:08:20 crc kubenswrapper[4742]: I1205 07:08:20.544249 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4_19fa719f-3ace-451f-ba24-3c9a3fc6bc2b/util/0.log" Dec 05 07:08:20 crc kubenswrapper[4742]: I1205 07:08:20.655030 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4_19fa719f-3ace-451f-ba24-3c9a3fc6bc2b/pull/0.log" Dec 05 07:08:20 crc kubenswrapper[4742]: I1205 07:08:20.655161 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4_19fa719f-3ace-451f-ba24-3c9a3fc6bc2b/pull/0.log" Dec 05 07:08:20 crc kubenswrapper[4742]: I1205 07:08:20.783244 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4_19fa719f-3ace-451f-ba24-3c9a3fc6bc2b/pull/0.log" Dec 05 07:08:20 crc kubenswrapper[4742]: I1205 07:08:20.798801 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4_19fa719f-3ace-451f-ba24-3c9a3fc6bc2b/util/0.log" Dec 05 07:08:20 crc kubenswrapper[4742]: I1205 07:08:20.803273 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fh7st4_19fa719f-3ace-451f-ba24-3c9a3fc6bc2b/extract/0.log" Dec 05 07:08:21 crc kubenswrapper[4742]: I1205 07:08:21.049323 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h_822fa448-d076-49f9-9467-2c912b88b081/util/0.log" Dec 05 07:08:21 crc kubenswrapper[4742]: I1205 07:08:21.218275 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h_822fa448-d076-49f9-9467-2c912b88b081/pull/0.log" Dec 05 07:08:21 crc kubenswrapper[4742]: I1205 07:08:21.223035 4742 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h_822fa448-d076-49f9-9467-2c912b88b081/util/0.log" Dec 05 07:08:21 crc kubenswrapper[4742]: I1205 07:08:21.261393 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h_822fa448-d076-49f9-9467-2c912b88b081/pull/0.log" Dec 05 07:08:21 crc kubenswrapper[4742]: I1205 07:08:21.390306 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h_822fa448-d076-49f9-9467-2c912b88b081/util/0.log" Dec 05 07:08:21 crc kubenswrapper[4742]: I1205 07:08:21.440373 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h_822fa448-d076-49f9-9467-2c912b88b081/extract/0.log" Dec 05 07:08:21 crc kubenswrapper[4742]: I1205 07:08:21.440784 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g5j8h_822fa448-d076-49f9-9467-2c912b88b081/pull/0.log" Dec 05 07:08:21 crc kubenswrapper[4742]: I1205 07:08:21.585300 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t26cm_96fc5c33-e057-48e7-9e20-3b8860f09a1f/extract-utilities/0.log" Dec 05 07:08:21 crc kubenswrapper[4742]: I1205 07:08:21.741508 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t26cm_96fc5c33-e057-48e7-9e20-3b8860f09a1f/extract-utilities/0.log" Dec 05 07:08:21 crc kubenswrapper[4742]: I1205 07:08:21.742511 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t26cm_96fc5c33-e057-48e7-9e20-3b8860f09a1f/extract-content/0.log" Dec 05 07:08:21 crc kubenswrapper[4742]: I1205 07:08:21.752898 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t26cm_96fc5c33-e057-48e7-9e20-3b8860f09a1f/extract-content/0.log" Dec 05 07:08:21 crc kubenswrapper[4742]: I1205 07:08:21.898756 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t26cm_96fc5c33-e057-48e7-9e20-3b8860f09a1f/extract-content/0.log" Dec 05 07:08:21 crc kubenswrapper[4742]: I1205 07:08:21.946968 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t26cm_96fc5c33-e057-48e7-9e20-3b8860f09a1f/extract-utilities/0.log" Dec 05 07:08:22 crc kubenswrapper[4742]: I1205 07:08:22.174941 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dms6l_e4265f02-a9b1-4e0d-b568-e928700ff3f6/extract-utilities/0.log" Dec 05 07:08:22 crc kubenswrapper[4742]: I1205 07:08:22.499758 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dms6l_e4265f02-a9b1-4e0d-b568-e928700ff3f6/extract-content/0.log" Dec 05 07:08:22 crc kubenswrapper[4742]: I1205 07:08:22.527685 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dms6l_e4265f02-a9b1-4e0d-b568-e928700ff3f6/extract-utilities/0.log" Dec 05 07:08:22 crc kubenswrapper[4742]: I1205 07:08:22.538601 4742 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-dms6l_e4265f02-a9b1-4e0d-b568-e928700ff3f6/extract-content/0.log" Dec 05 07:08:22 crc kubenswrapper[4742]: I1205 07:08:22.638072 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-t26cm_96fc5c33-e057-48e7-9e20-3b8860f09a1f/registry-server/0.log" Dec 05 07:08:22 crc kubenswrapper[4742]: I1205 07:08:22.704533 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dms6l_e4265f02-a9b1-4e0d-b568-e928700ff3f6/extract-content/0.log" Dec 05 07:08:22 crc kubenswrapper[4742]: I1205 07:08:22.744700 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dms6l_e4265f02-a9b1-4e0d-b568-e928700ff3f6/extract-utilities/0.log" Dec 05 07:08:22 crc kubenswrapper[4742]: I1205 07:08:22.897003 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-qhk9s_1c4ffc9a-27a6-44a8-9537-bd58e3fc8b7d/marketplace-operator/0.log" Dec 05 07:08:23 crc kubenswrapper[4742]: I1205 07:08:23.076376 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nk79d_97e4e804-858b-4992-9bce-31ede1359c3e/extract-utilities/0.log" Dec 05 07:08:23 crc kubenswrapper[4742]: I1205 07:08:23.254614 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nk79d_97e4e804-858b-4992-9bce-31ede1359c3e/extract-utilities/0.log" Dec 05 07:08:23 crc kubenswrapper[4742]: I1205 07:08:23.351016 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nk79d_97e4e804-858b-4992-9bce-31ede1359c3e/extract-content/0.log" Dec 05 07:08:23 crc kubenswrapper[4742]: I1205 07:08:23.354883 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dms6l_e4265f02-a9b1-4e0d-b568-e928700ff3f6/registry-server/0.log" Dec 05 07:08:23 crc kubenswrapper[4742]: I1205 07:08:23.383653 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nk79d_97e4e804-858b-4992-9bce-31ede1359c3e/extract-content/0.log" Dec 05 07:08:23 crc kubenswrapper[4742]: I1205 07:08:23.522476 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nk79d_97e4e804-858b-4992-9bce-31ede1359c3e/extract-utilities/0.log" Dec 05 07:08:23 crc kubenswrapper[4742]: I1205 07:08:23.534356 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nk79d_97e4e804-858b-4992-9bce-31ede1359c3e/extract-content/0.log" Dec 05 07:08:23 crc kubenswrapper[4742]: I1205 07:08:23.619104 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wpdk5_c54462f3-33a2-4bf4-9601-5a321e633702/extract-utilities/0.log" Dec 05 07:08:23 crc kubenswrapper[4742]: I1205 07:08:23.706243 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-nk79d_97e4e804-858b-4992-9bce-31ede1359c3e/registry-server/0.log" Dec 05 07:08:23 crc kubenswrapper[4742]: I1205 07:08:23.760185 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wpdk5_c54462f3-33a2-4bf4-9601-5a321e633702/extract-utilities/0.log" Dec 05 07:08:23 crc kubenswrapper[4742]: I1205 07:08:23.827802 4742 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-wpdk5_c54462f3-33a2-4bf4-9601-5a321e633702/extract-content/0.log" Dec 05 07:08:23 crc kubenswrapper[4742]: I1205 07:08:23.838572 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wpdk5_c54462f3-33a2-4bf4-9601-5a321e633702/extract-content/0.log" Dec 05 07:08:23 crc kubenswrapper[4742]: I1205 07:08:23.978124 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wpdk5_c54462f3-33a2-4bf4-9601-5a321e633702/extract-content/0.log" Dec 05 07:08:24 crc kubenswrapper[4742]: I1205 07:08:24.008403 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wpdk5_c54462f3-33a2-4bf4-9601-5a321e633702/extract-utilities/0.log" Dec 05 07:08:24 crc kubenswrapper[4742]: I1205 07:08:24.505230 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wpdk5_c54462f3-33a2-4bf4-9601-5a321e633702/registry-server/0.log" Dec 05 07:08:26 crc kubenswrapper[4742]: I1205 07:08:26.382683 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:08:26 crc kubenswrapper[4742]: E1205 07:08:26.383233 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:08:39 crc kubenswrapper[4742]: I1205 07:08:39.382452 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:08:39 crc kubenswrapper[4742]: E1205 07:08:39.383200 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:08:53 crc kubenswrapper[4742]: I1205 07:08:53.382927 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:08:53 crc kubenswrapper[4742]: E1205 07:08:53.383855 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:09:05 crc kubenswrapper[4742]: I1205 07:09:05.383335 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:09:05 crc kubenswrapper[4742]: E1205 07:09:05.384428 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:09:17 crc kubenswrapper[4742]: I1205 07:09:17.383958 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:09:17 crc kubenswrapper[4742]: E1205 07:09:17.385090 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:09:29 crc kubenswrapper[4742]: I1205 07:09:29.551274 4742 generic.go:334] "Generic (PLEG): container finished" podID="e5b216b7-f4e7-4441-83b2-22cb654aad9e" containerID="54fab92f1b75eabf94a4d5335fb108e672ae43848c78810d045dcbf79388a8d3" exitCode=0 Dec 05 07:09:29 crc kubenswrapper[4742]: I1205 07:09:29.551407 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-z4nsk/must-gather-tgwzw" event={"ID":"e5b216b7-f4e7-4441-83b2-22cb654aad9e","Type":"ContainerDied","Data":"54fab92f1b75eabf94a4d5335fb108e672ae43848c78810d045dcbf79388a8d3"} Dec 05 07:09:29 crc kubenswrapper[4742]: I1205 07:09:29.552928 4742 scope.go:117] "RemoveContainer" containerID="54fab92f1b75eabf94a4d5335fb108e672ae43848c78810d045dcbf79388a8d3" Dec 05 07:09:30 crc kubenswrapper[4742]: I1205 07:09:30.121682 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-z4nsk_must-gather-tgwzw_e5b216b7-f4e7-4441-83b2-22cb654aad9e/gather/0.log" Dec 05 07:09:31 crc kubenswrapper[4742]: I1205 07:09:31.382883 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:09:31 crc kubenswrapper[4742]: E1205 07:09:31.383194 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:09:38 crc kubenswrapper[4742]: I1205 07:09:38.666442 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-z4nsk/must-gather-tgwzw"] Dec 05 07:09:38 crc kubenswrapper[4742]: I1205 07:09:38.667435 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-z4nsk/must-gather-tgwzw" podUID="e5b216b7-f4e7-4441-83b2-22cb654aad9e" containerName="copy" containerID="cri-o://4514710bf6931e4c25b80eb46bbfff5aa018594e24cf22c44c3e05848cdd4380" gracePeriod=2 Dec 05 07:09:38 crc kubenswrapper[4742]: I1205 07:09:38.673823 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-z4nsk/must-gather-tgwzw"] Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.073569 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-z4nsk_must-gather-tgwzw_e5b216b7-f4e7-4441-83b2-22cb654aad9e/copy/0.log" Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.074389 4742 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-z4nsk/must-gather-tgwzw" Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.204547 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjjzv\" (UniqueName: \"kubernetes.io/projected/e5b216b7-f4e7-4441-83b2-22cb654aad9e-kube-api-access-pjjzv\") pod \"e5b216b7-f4e7-4441-83b2-22cb654aad9e\" (UID: \"e5b216b7-f4e7-4441-83b2-22cb654aad9e\") " Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.204595 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e5b216b7-f4e7-4441-83b2-22cb654aad9e-must-gather-output\") pod \"e5b216b7-f4e7-4441-83b2-22cb654aad9e\" (UID: \"e5b216b7-f4e7-4441-83b2-22cb654aad9e\") " Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.212434 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5b216b7-f4e7-4441-83b2-22cb654aad9e-kube-api-access-pjjzv" (OuterVolumeSpecName: "kube-api-access-pjjzv") pod "e5b216b7-f4e7-4441-83b2-22cb654aad9e" (UID: "e5b216b7-f4e7-4441-83b2-22cb654aad9e"). InnerVolumeSpecName "kube-api-access-pjjzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.299392 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5b216b7-f4e7-4441-83b2-22cb654aad9e-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "e5b216b7-f4e7-4441-83b2-22cb654aad9e" (UID: "e5b216b7-f4e7-4441-83b2-22cb654aad9e"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.306292 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjjzv\" (UniqueName: \"kubernetes.io/projected/e5b216b7-f4e7-4441-83b2-22cb654aad9e-kube-api-access-pjjzv\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.306563 4742 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/e5b216b7-f4e7-4441-83b2-22cb654aad9e-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.636532 4742 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-z4nsk_must-gather-tgwzw_e5b216b7-f4e7-4441-83b2-22cb654aad9e/copy/0.log" Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.637111 4742 generic.go:334] "Generic (PLEG): container finished" podID="e5b216b7-f4e7-4441-83b2-22cb654aad9e" containerID="4514710bf6931e4c25b80eb46bbfff5aa018594e24cf22c44c3e05848cdd4380" exitCode=143 Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.637180 4742 scope.go:117] "RemoveContainer" containerID="4514710bf6931e4c25b80eb46bbfff5aa018594e24cf22c44c3e05848cdd4380" Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.637263 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-z4nsk/must-gather-tgwzw" Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.674543 4742 scope.go:117] "RemoveContainer" containerID="54fab92f1b75eabf94a4d5335fb108e672ae43848c78810d045dcbf79388a8d3" Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.732610 4742 scope.go:117] "RemoveContainer" containerID="4514710bf6931e4c25b80eb46bbfff5aa018594e24cf22c44c3e05848cdd4380" Dec 05 07:09:39 crc kubenswrapper[4742]: E1205 07:09:39.733674 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4514710bf6931e4c25b80eb46bbfff5aa018594e24cf22c44c3e05848cdd4380\": container with ID starting with 4514710bf6931e4c25b80eb46bbfff5aa018594e24cf22c44c3e05848cdd4380 not found: ID does not exist" containerID="4514710bf6931e4c25b80eb46bbfff5aa018594e24cf22c44c3e05848cdd4380" Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.733733 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4514710bf6931e4c25b80eb46bbfff5aa018594e24cf22c44c3e05848cdd4380"} err="failed to get container status \"4514710bf6931e4c25b80eb46bbfff5aa018594e24cf22c44c3e05848cdd4380\": rpc error: code = NotFound desc = could not find container \"4514710bf6931e4c25b80eb46bbfff5aa018594e24cf22c44c3e05848cdd4380\": container with ID starting with 4514710bf6931e4c25b80eb46bbfff5aa018594e24cf22c44c3e05848cdd4380 not found: ID does not exist" Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.733767 4742 scope.go:117] "RemoveContainer" containerID="54fab92f1b75eabf94a4d5335fb108e672ae43848c78810d045dcbf79388a8d3" Dec 05 07:09:39 crc kubenswrapper[4742]: E1205 07:09:39.734161 4742 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54fab92f1b75eabf94a4d5335fb108e672ae43848c78810d045dcbf79388a8d3\": container with ID starting with 54fab92f1b75eabf94a4d5335fb108e672ae43848c78810d045dcbf79388a8d3 not found: ID does not exist" containerID="54fab92f1b75eabf94a4d5335fb108e672ae43848c78810d045dcbf79388a8d3" Dec 05 07:09:39 crc kubenswrapper[4742]: I1205 07:09:39.734198 4742 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54fab92f1b75eabf94a4d5335fb108e672ae43848c78810d045dcbf79388a8d3"} err="failed to get container status \"54fab92f1b75eabf94a4d5335fb108e672ae43848c78810d045dcbf79388a8d3\": rpc error: code = NotFound desc = could not find container \"54fab92f1b75eabf94a4d5335fb108e672ae43848c78810d045dcbf79388a8d3\": container with ID starting with 54fab92f1b75eabf94a4d5335fb108e672ae43848c78810d045dcbf79388a8d3 not found: ID does not exist" Dec 05 07:09:40 crc kubenswrapper[4742]: I1205 07:09:40.394318 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5b216b7-f4e7-4441-83b2-22cb654aad9e" path="/var/lib/kubelet/pods/e5b216b7-f4e7-4441-83b2-22cb654aad9e/volumes" Dec 05 07:09:45 crc kubenswrapper[4742]: I1205 07:09:45.382622 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:09:45 crc kubenswrapper[4742]: E1205 07:09:45.383312 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" 
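Two details of the must-gather teardown just above are easy to misread as failures. First, the copy container finishes with exitCode=143, which is 128 + 15: the process was terminated by the SIGTERM delivered for the 2-second grace period, the normal outcome of "Killing container with a grace period". Second, the E-level "ContainerStatus from runtime service failed ... NotFound" entries occur because the container had already been removed by the time the status lookup ran; the log shows the kubelet recording the error and moving on, consistent with treating NotFound during deletion as already-done. A sketch of that idempotent-delete convention, using the same gRPC NotFound code printed in the log (the surrounding helper is hypothetical, not kubelet code):

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeIfPresent treats NotFound from the runtime as success, mirroring
// the "container ... not found: ID does not exist" entries above, which
// are logged but do not fail the cleanup.
func removeIfPresent(containerID string, remove func(string) error) error {
	if err := remove(containerID); err != nil {
		if status.Code(err) == codes.NotFound {
			return nil // already gone: deletion is idempotent
		}
		return err
	}
	return nil
}

func main() {
	notFound := func(id string) error {
		return status.Errorf(codes.NotFound, "could not find container %q", id)
	}
	err := removeIfPresent("4514710bf693", notFound)
	fmt.Println("cleanup error:", err) // cleanup error: <nil>
}
```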
pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:09:58 crc kubenswrapper[4742]: I1205 07:09:58.389119 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:09:58 crc kubenswrapper[4742]: E1205 07:09:58.389971 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:10:12 crc kubenswrapper[4742]: I1205 07:10:12.383642 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:10:12 crc kubenswrapper[4742]: E1205 07:10:12.384973 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:10:24 crc kubenswrapper[4742]: I1205 07:10:24.391240 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:10:24 crc kubenswrapper[4742]: E1205 07:10:24.392840 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:10:36 crc kubenswrapper[4742]: I1205 07:10:36.383726 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:10:36 crc kubenswrapper[4742]: E1205 07:10:36.384920 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.135180 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lkrx8"] Dec 05 07:10:44 crc kubenswrapper[4742]: E1205 07:10:44.136115 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5b216b7-f4e7-4441-83b2-22cb654aad9e" containerName="gather" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.136130 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5b216b7-f4e7-4441-83b2-22cb654aad9e" containerName="gather" Dec 05 07:10:44 crc kubenswrapper[4742]: E1205 07:10:44.136163 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5b216b7-f4e7-4441-83b2-22cb654aad9e" containerName="copy" Dec 05 
07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.136172 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5b216b7-f4e7-4441-83b2-22cb654aad9e" containerName="copy" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.136329 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5b216b7-f4e7-4441-83b2-22cb654aad9e" containerName="gather" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.136347 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5b216b7-f4e7-4441-83b2-22cb654aad9e" containerName="copy" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.137598 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.167184 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lkrx8"] Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.219155 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/628740ba-2ef9-498b-9844-0c86f572bb35-utilities\") pod \"community-operators-lkrx8\" (UID: \"628740ba-2ef9-498b-9844-0c86f572bb35\") " pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.219224 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrl9f\" (UniqueName: \"kubernetes.io/projected/628740ba-2ef9-498b-9844-0c86f572bb35-kube-api-access-hrl9f\") pod \"community-operators-lkrx8\" (UID: \"628740ba-2ef9-498b-9844-0c86f572bb35\") " pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.219392 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/628740ba-2ef9-498b-9844-0c86f572bb35-catalog-content\") pod \"community-operators-lkrx8\" (UID: \"628740ba-2ef9-498b-9844-0c86f572bb35\") " pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.320916 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/628740ba-2ef9-498b-9844-0c86f572bb35-catalog-content\") pod \"community-operators-lkrx8\" (UID: \"628740ba-2ef9-498b-9844-0c86f572bb35\") " pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.320990 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/628740ba-2ef9-498b-9844-0c86f572bb35-utilities\") pod \"community-operators-lkrx8\" (UID: \"628740ba-2ef9-498b-9844-0c86f572bb35\") " pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.321017 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrl9f\" (UniqueName: \"kubernetes.io/projected/628740ba-2ef9-498b-9844-0c86f572bb35-kube-api-access-hrl9f\") pod \"community-operators-lkrx8\" (UID: \"628740ba-2ef9-498b-9844-0c86f572bb35\") " pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.321515 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/628740ba-2ef9-498b-9844-0c86f572bb35-utilities\") pod \"community-operators-lkrx8\" (UID: \"628740ba-2ef9-498b-9844-0c86f572bb35\") " pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.321699 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/628740ba-2ef9-498b-9844-0c86f572bb35-catalog-content\") pod \"community-operators-lkrx8\" (UID: \"628740ba-2ef9-498b-9844-0c86f572bb35\") " pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.350221 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrl9f\" (UniqueName: \"kubernetes.io/projected/628740ba-2ef9-498b-9844-0c86f572bb35-kube-api-access-hrl9f\") pod \"community-operators-lkrx8\" (UID: \"628740ba-2ef9-498b-9844-0c86f572bb35\") " pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:44 crc kubenswrapper[4742]: I1205 07:10:44.471260 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:45 crc kubenswrapper[4742]: I1205 07:10:45.011192 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lkrx8"] Dec 05 07:10:45 crc kubenswrapper[4742]: I1205 07:10:45.200243 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lkrx8" event={"ID":"628740ba-2ef9-498b-9844-0c86f572bb35","Type":"ContainerStarted","Data":"a05f4fddbdd3e54b3d4a6baef343d1ebd5cf119b255b336e808aeb1690a8cff0"} Dec 05 07:10:46 crc kubenswrapper[4742]: I1205 07:10:46.212347 4742 generic.go:334] "Generic (PLEG): container finished" podID="628740ba-2ef9-498b-9844-0c86f572bb35" containerID="6ebd071460f9c8663190a12fb2a249ace40dd1d4d0592e6a3519a5cec43d37c4" exitCode=0 Dec 05 07:10:46 crc kubenswrapper[4742]: I1205 07:10:46.212446 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lkrx8" event={"ID":"628740ba-2ef9-498b-9844-0c86f572bb35","Type":"ContainerDied","Data":"6ebd071460f9c8663190a12fb2a249ace40dd1d4d0592e6a3519a5cec43d37c4"} Dec 05 07:10:47 crc kubenswrapper[4742]: I1205 07:10:47.224486 4742 generic.go:334] "Generic (PLEG): container finished" podID="628740ba-2ef9-498b-9844-0c86f572bb35" containerID="34e7b367d3a743181d283cc0d41ee0a3ed6ffb470ef9ccdffd2e15e23492cd9d" exitCode=0 Dec 05 07:10:47 crc kubenswrapper[4742]: I1205 07:10:47.224536 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lkrx8" event={"ID":"628740ba-2ef9-498b-9844-0c86f572bb35","Type":"ContainerDied","Data":"34e7b367d3a743181d283cc0d41ee0a3ed6ffb470ef9ccdffd2e15e23492cd9d"} Dec 05 07:10:48 crc kubenswrapper[4742]: I1205 07:10:48.233373 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lkrx8" event={"ID":"628740ba-2ef9-498b-9844-0c86f572bb35","Type":"ContainerStarted","Data":"e08fcbbafe718f508d1073f42c29e3d8dd998271660a5d5873baf0f48599471f"} Dec 05 07:10:48 crc kubenswrapper[4742]: I1205 07:10:48.252753 4742 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lkrx8" podStartSLOduration=2.660840262 podStartE2EDuration="4.252738443s" podCreationTimestamp="2025-12-05 07:10:44 +0000 UTC" firstStartedPulling="2025-12-05 07:10:46.216581362 +0000 UTC 
m=+4722.128716464" lastFinishedPulling="2025-12-05 07:10:47.808479573 +0000 UTC m=+4723.720614645" observedRunningTime="2025-12-05 07:10:48.248861221 +0000 UTC m=+4724.160996303" watchObservedRunningTime="2025-12-05 07:10:48.252738443 +0000 UTC m=+4724.164873505" Dec 05 07:10:49 crc kubenswrapper[4742]: I1205 07:10:49.382749 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:10:49 crc kubenswrapper[4742]: E1205 07:10:49.383095 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:10:54 crc kubenswrapper[4742]: I1205 07:10:54.471361 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:54 crc kubenswrapper[4742]: I1205 07:10:54.471753 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:54 crc kubenswrapper[4742]: I1205 07:10:54.555586 4742 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:55 crc kubenswrapper[4742]: I1205 07:10:55.344889 4742 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:55 crc kubenswrapper[4742]: I1205 07:10:55.408993 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lkrx8"] Dec 05 07:10:57 crc kubenswrapper[4742]: I1205 07:10:57.321384 4742 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lkrx8" podUID="628740ba-2ef9-498b-9844-0c86f572bb35" containerName="registry-server" containerID="cri-o://e08fcbbafe718f508d1073f42c29e3d8dd998271660a5d5873baf0f48599471f" gracePeriod=2 Dec 05 07:10:58 crc kubenswrapper[4742]: I1205 07:10:58.338269 4742 generic.go:334] "Generic (PLEG): container finished" podID="628740ba-2ef9-498b-9844-0c86f572bb35" containerID="e08fcbbafe718f508d1073f42c29e3d8dd998271660a5d5873baf0f48599471f" exitCode=0 Dec 05 07:10:58 crc kubenswrapper[4742]: I1205 07:10:58.338367 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lkrx8" event={"ID":"628740ba-2ef9-498b-9844-0c86f572bb35","Type":"ContainerDied","Data":"e08fcbbafe718f508d1073f42c29e3d8dd998271660a5d5873baf0f48599471f"} Dec 05 07:10:58 crc kubenswrapper[4742]: I1205 07:10:58.964443 4742 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.083946 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/628740ba-2ef9-498b-9844-0c86f572bb35-catalog-content\") pod \"628740ba-2ef9-498b-9844-0c86f572bb35\" (UID: \"628740ba-2ef9-498b-9844-0c86f572bb35\") " Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.084660 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrl9f\" (UniqueName: \"kubernetes.io/projected/628740ba-2ef9-498b-9844-0c86f572bb35-kube-api-access-hrl9f\") pod \"628740ba-2ef9-498b-9844-0c86f572bb35\" (UID: \"628740ba-2ef9-498b-9844-0c86f572bb35\") " Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.084843 4742 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/628740ba-2ef9-498b-9844-0c86f572bb35-utilities\") pod \"628740ba-2ef9-498b-9844-0c86f572bb35\" (UID: \"628740ba-2ef9-498b-9844-0c86f572bb35\") " Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.086444 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/628740ba-2ef9-498b-9844-0c86f572bb35-utilities" (OuterVolumeSpecName: "utilities") pod "628740ba-2ef9-498b-9844-0c86f572bb35" (UID: "628740ba-2ef9-498b-9844-0c86f572bb35"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.086740 4742 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/628740ba-2ef9-498b-9844-0c86f572bb35-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.093646 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/628740ba-2ef9-498b-9844-0c86f572bb35-kube-api-access-hrl9f" (OuterVolumeSpecName: "kube-api-access-hrl9f") pod "628740ba-2ef9-498b-9844-0c86f572bb35" (UID: "628740ba-2ef9-498b-9844-0c86f572bb35"). InnerVolumeSpecName "kube-api-access-hrl9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.151429 4742 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/628740ba-2ef9-498b-9844-0c86f572bb35-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "628740ba-2ef9-498b-9844-0c86f572bb35" (UID: "628740ba-2ef9-498b-9844-0c86f572bb35"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.188820 4742 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrl9f\" (UniqueName: \"kubernetes.io/projected/628740ba-2ef9-498b-9844-0c86f572bb35-kube-api-access-hrl9f\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.188882 4742 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/628740ba-2ef9-498b-9844-0c86f572bb35-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.353112 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lkrx8" event={"ID":"628740ba-2ef9-498b-9844-0c86f572bb35","Type":"ContainerDied","Data":"a05f4fddbdd3e54b3d4a6baef343d1ebd5cf119b255b336e808aeb1690a8cff0"} Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.353201 4742 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lkrx8" Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.353260 4742 scope.go:117] "RemoveContainer" containerID="e08fcbbafe718f508d1073f42c29e3d8dd998271660a5d5873baf0f48599471f" Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.377673 4742 scope.go:117] "RemoveContainer" containerID="34e7b367d3a743181d283cc0d41ee0a3ed6ffb470ef9ccdffd2e15e23492cd9d" Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.418799 4742 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lkrx8"] Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.428426 4742 scope.go:117] "RemoveContainer" containerID="6ebd071460f9c8663190a12fb2a249ace40dd1d4d0592e6a3519a5cec43d37c4" Dec 05 07:10:59 crc kubenswrapper[4742]: I1205 07:10:59.429540 4742 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lkrx8"] Dec 05 07:11:00 crc kubenswrapper[4742]: I1205 07:11:00.384550 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:11:00 crc kubenswrapper[4742]: E1205 07:11:00.384899 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:11:00 crc kubenswrapper[4742]: I1205 07:11:00.392225 4742 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="628740ba-2ef9-498b-9844-0c86f572bb35" path="/var/lib/kubelet/pods/628740ba-2ef9-498b-9844-0c86f572bb35/volumes" Dec 05 07:11:13 crc kubenswrapper[4742]: I1205 07:11:13.383587 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:11:13 crc kubenswrapper[4742]: E1205 07:11:13.384736 4742 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-7q8lw_openshift-machine-config-operator(3fc0b032-e995-4d0f-b5e7-600b880849f5)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" podUID="3fc0b032-e995-4d0f-b5e7-600b880849f5" Dec 05 07:11:26 crc kubenswrapper[4742]: I1205 07:11:26.382422 4742 scope.go:117] "RemoveContainer" containerID="9d0dba8a5003e3eaa64e3420091b1d41c9890973aa4238f35e0252d59230877b" Dec 05 07:11:26 crc kubenswrapper[4742]: I1205 07:11:26.632647 4742 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-7q8lw" event={"ID":"3fc0b032-e995-4d0f-b5e7-600b880849f5","Type":"ContainerStarted","Data":"5ccb8eb952090b499008b0f62db4045e73b6e76f18bcff2920b2b56d682fdc04"} Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.132990 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-z5r5k"] Dec 05 07:13:19 crc kubenswrapper[4742]: E1205 07:13:19.134129 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="628740ba-2ef9-498b-9844-0c86f572bb35" containerName="registry-server" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.134158 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="628740ba-2ef9-498b-9844-0c86f572bb35" containerName="registry-server" Dec 05 07:13:19 crc kubenswrapper[4742]: E1205 07:13:19.134182 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="628740ba-2ef9-498b-9844-0c86f572bb35" containerName="extract-utilities" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.134191 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="628740ba-2ef9-498b-9844-0c86f572bb35" containerName="extract-utilities" Dec 05 07:13:19 crc kubenswrapper[4742]: E1205 07:13:19.134219 4742 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="628740ba-2ef9-498b-9844-0c86f572bb35" containerName="extract-content" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.134226 4742 state_mem.go:107] "Deleted CPUSet assignment" podUID="628740ba-2ef9-498b-9844-0c86f572bb35" containerName="extract-content" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.134395 4742 memory_manager.go:354] "RemoveStaleState removing state" podUID="628740ba-2ef9-498b-9844-0c86f572bb35" containerName="registry-server" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.143114 4742 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z5r5k" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.152056 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-z5r5k"] Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.252866 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/946b0d56-1fbb-4e53-9bdc-775d10c4664f-catalog-content\") pod \"redhat-marketplace-z5r5k\" (UID: \"946b0d56-1fbb-4e53-9bdc-775d10c4664f\") " pod="openshift-marketplace/redhat-marketplace-z5r5k" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.253152 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/946b0d56-1fbb-4e53-9bdc-775d10c4664f-utilities\") pod \"redhat-marketplace-z5r5k\" (UID: \"946b0d56-1fbb-4e53-9bdc-775d10c4664f\") " pod="openshift-marketplace/redhat-marketplace-z5r5k" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.253424 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kh28g\" (UniqueName: \"kubernetes.io/projected/946b0d56-1fbb-4e53-9bdc-775d10c4664f-kube-api-access-kh28g\") pod \"redhat-marketplace-z5r5k\" (UID: \"946b0d56-1fbb-4e53-9bdc-775d10c4664f\") " pod="openshift-marketplace/redhat-marketplace-z5r5k" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.285121 4742 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-w8xn2"] Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.286858 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w8xn2" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.318513 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w8xn2"] Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.356209 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/946b0d56-1fbb-4e53-9bdc-775d10c4664f-utilities\") pod \"redhat-marketplace-z5r5k\" (UID: \"946b0d56-1fbb-4e53-9bdc-775d10c4664f\") " pod="openshift-marketplace/redhat-marketplace-z5r5k" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.356411 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgglk\" (UniqueName: \"kubernetes.io/projected/1d360ebd-9ef8-4c72-9fdc-b502e7e00acf-kube-api-access-kgglk\") pod \"redhat-operators-w8xn2\" (UID: \"1d360ebd-9ef8-4c72-9fdc-b502e7e00acf\") " pod="openshift-marketplace/redhat-operators-w8xn2" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.356691 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d360ebd-9ef8-4c72-9fdc-b502e7e00acf-catalog-content\") pod \"redhat-operators-w8xn2\" (UID: \"1d360ebd-9ef8-4c72-9fdc-b502e7e00acf\") " pod="openshift-marketplace/redhat-operators-w8xn2" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.357008 4742 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d360ebd-9ef8-4c72-9fdc-b502e7e00acf-utilities\") pod \"redhat-operators-w8xn2\" (UID: 
\"1d360ebd-9ef8-4c72-9fdc-b502e7e00acf\") " pod="openshift-marketplace/redhat-operators-w8xn2" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.362034 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kh28g\" (UniqueName: \"kubernetes.io/projected/946b0d56-1fbb-4e53-9bdc-775d10c4664f-kube-api-access-kh28g\") pod \"redhat-marketplace-z5r5k\" (UID: \"946b0d56-1fbb-4e53-9bdc-775d10c4664f\") " pod="openshift-marketplace/redhat-marketplace-z5r5k" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.362343 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/946b0d56-1fbb-4e53-9bdc-775d10c4664f-catalog-content\") pod \"redhat-marketplace-z5r5k\" (UID: \"946b0d56-1fbb-4e53-9bdc-775d10c4664f\") " pod="openshift-marketplace/redhat-marketplace-z5r5k" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.363131 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/946b0d56-1fbb-4e53-9bdc-775d10c4664f-catalog-content\") pod \"redhat-marketplace-z5r5k\" (UID: \"946b0d56-1fbb-4e53-9bdc-775d10c4664f\") " pod="openshift-marketplace/redhat-marketplace-z5r5k" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.363490 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/946b0d56-1fbb-4e53-9bdc-775d10c4664f-utilities\") pod \"redhat-marketplace-z5r5k\" (UID: \"946b0d56-1fbb-4e53-9bdc-775d10c4664f\") " pod="openshift-marketplace/redhat-marketplace-z5r5k" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.386904 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kh28g\" (UniqueName: \"kubernetes.io/projected/946b0d56-1fbb-4e53-9bdc-775d10c4664f-kube-api-access-kh28g\") pod \"redhat-marketplace-z5r5k\" (UID: \"946b0d56-1fbb-4e53-9bdc-775d10c4664f\") " pod="openshift-marketplace/redhat-marketplace-z5r5k" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.464006 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgglk\" (UniqueName: \"kubernetes.io/projected/1d360ebd-9ef8-4c72-9fdc-b502e7e00acf-kube-api-access-kgglk\") pod \"redhat-operators-w8xn2\" (UID: \"1d360ebd-9ef8-4c72-9fdc-b502e7e00acf\") " pod="openshift-marketplace/redhat-operators-w8xn2" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.464099 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d360ebd-9ef8-4c72-9fdc-b502e7e00acf-catalog-content\") pod \"redhat-operators-w8xn2\" (UID: \"1d360ebd-9ef8-4c72-9fdc-b502e7e00acf\") " pod="openshift-marketplace/redhat-operators-w8xn2" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.464133 4742 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d360ebd-9ef8-4c72-9fdc-b502e7e00acf-utilities\") pod \"redhat-operators-w8xn2\" (UID: \"1d360ebd-9ef8-4c72-9fdc-b502e7e00acf\") " pod="openshift-marketplace/redhat-operators-w8xn2" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.464549 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d360ebd-9ef8-4c72-9fdc-b502e7e00acf-catalog-content\") pod \"redhat-operators-w8xn2\" (UID: 
\"1d360ebd-9ef8-4c72-9fdc-b502e7e00acf\") " pod="openshift-marketplace/redhat-operators-w8xn2" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.464623 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d360ebd-9ef8-4c72-9fdc-b502e7e00acf-utilities\") pod \"redhat-operators-w8xn2\" (UID: \"1d360ebd-9ef8-4c72-9fdc-b502e7e00acf\") " pod="openshift-marketplace/redhat-operators-w8xn2" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.484918 4742 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgglk\" (UniqueName: \"kubernetes.io/projected/1d360ebd-9ef8-4c72-9fdc-b502e7e00acf-kube-api-access-kgglk\") pod \"redhat-operators-w8xn2\" (UID: \"1d360ebd-9ef8-4c72-9fdc-b502e7e00acf\") " pod="openshift-marketplace/redhat-operators-w8xn2" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.485499 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-z5r5k" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.604787 4742 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w8xn2" Dec 05 07:13:19 crc kubenswrapper[4742]: I1205 07:13:19.947013 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-z5r5k"] Dec 05 07:13:20 crc kubenswrapper[4742]: I1205 07:13:20.058545 4742 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w8xn2"] Dec 05 07:13:20 crc kubenswrapper[4742]: W1205 07:13:20.059640 4742 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d360ebd_9ef8_4c72_9fdc_b502e7e00acf.slice/crio-afdb664e8f1f84a302e091d3213aeeb6cb03bc726eace592f3d1866cb27eb5f5 WatchSource:0}: Error finding container afdb664e8f1f84a302e091d3213aeeb6cb03bc726eace592f3d1866cb27eb5f5: Status 404 returned error can't find the container with id afdb664e8f1f84a302e091d3213aeeb6cb03bc726eace592f3d1866cb27eb5f5 var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114503026024442 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015114503027017360 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015114471057016512 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015114471057015462 5ustar corecore